diff --git a/.babelrc b/.babelrc
index cea726b873..c13c5f627f 100644
--- a/.babelrc
+++ b/.babelrc
@@ -1,3 +1,3 @@
-{
-  "presets": ["es2015"],
+{
+  "presets": ["es2015"]
 }
diff --git a/.eslintrc.json b/.eslintrc.json
index b7bfd661ca..3b41a5ad9f 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -1,13 +1,13 @@
 {
-    "parserOptions": {
-        "ecmaVersion": 6,
-        "sourceType": "module"
-    },
-    "rules": {
-        "semi": 1,
-        "no-cond-assign": 2,
-        "no-debugger": 2,
-        "comma-dangle": 0,
-        "no-unreachable" : 2
-    }
+  "parserOptions": {
+    "ecmaVersion": 6,
+    "sourceType": "module"
+  },
+  "rules": {
+    "semi": 1,
+    "no-cond-assign": 2,
+    "no-debugger": 2,
+    "comma-dangle": 0,
+    "no-unreachable": 2
+  }
 }
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 858d673868..28650e715f 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -1 +1,2 @@
 # Initial pre-commit reformat
+1444737514c3e8c6acbe3fc01f9420c8e078d3a0
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 44cab3da2c..2dfbfbcc01 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -14,16 +14,15 @@ name: "CodeQL"

 on:
   push:
-    branches: [ 'master' ]
+    branches: ["master"]
   pull_request:
     # The branches below must be a subset of the branches above
-    branches: [ master ]
+    branches: [master]
   schedule:
-    - cron: '25 18 * * 4'
+    - cron: "25 18 * * 4"

 permissions:
-  security-events:
-    write
+  security-events: write

 jobs:
   analyze:
@@ -33,41 +32,41 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        language: [ 'python' ]
+        language: ["python"]
         # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
         # Learn more...
         # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection

     steps:
-    - name: Checkout repository
-      uses: actions/checkout@v2
+      - name: Checkout repository
+        uses: actions/checkout@v2

-    # Initializes the CodeQL tools for scanning.
-    - name: Initialize CodeQL
-      uses: github/codeql-action/init@v1
-      with:
-        languages: ${{ matrix.language }}
-        # If you wish to specify custom queries, you can do so here or in a config file.
-        # By default, queries listed here will override any specified in a config file.
-        # Prefix the list here with "+" to use these queries and those in the config file.
-        # queries: ./path/to/local/query, your-org/your-repo/queries@main
-        queries: security-and-quality
+      # Initializes the CodeQL tools for scanning.
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v1
+        with:
+          languages: ${{ matrix.language }}
+          # If you wish to specify custom queries, you can do so here or in a config file.
+          # By default, queries listed here will override any specified in a config file.
+          # Prefix the list here with "+" to use these queries and those in the config file.
+          # queries: ./path/to/local/query, your-org/your-repo/queries@main
+          queries: security-and-quality

-    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
-    # If this step fails, then you should remove it and run the build manually (see below)
-    - name: Autobuild
-      uses: github/codeql-action/autobuild@v1
+      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+      # If this step fails, then you should remove it and run the build manually (see below)
+      - name: Autobuild
+        uses: github/codeql-action/autobuild@v1

-    # ℹī¸ Command-line programs to run using the OS shell.
-    # 📚 https://git.io/JvXDl
+      # ℹī¸ Command-line programs to run using the OS shell.
+      # 📚 https://git.io/JvXDl

-    # ✏ī¸ If the Autobuild fails above, remove it and uncomment the following three lines
-    #    and modify them (or add more) to build your code if your project
-    #    uses a compiled language
+      # ✏ī¸ If the Autobuild fails above, remove it and uncomment the following three lines
+      #    and modify them (or add more) to build your code if your project
+      #    uses a compiled language

-    #- run: |
-    #   make bootstrap
-    #   make release
+      #- run: |
+      #   make bootstrap
+      #   make release

-    - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v1
+      - name: Perform CodeQL Analysis
+        uses: github/codeql-action/analyze@v1
diff --git a/.github/workflows/downstream.yml b/.github/workflows/downstream.yml
index dbef7cde58..56fc86d889 100644
--- a/.github/workflows/downstream.yml
+++ b/.github/workflows/downstream.yml
@@ -2,9 +2,9 @@ name: Test downstream projects

 on:
   push:
-    branches: '*'
+    branches: "*"
   pull_request:
-    branches: '*'
+    branches: "*"

 jobs:
   tests:
@@ -12,35 +12,35 @@ jobs:
     timeout-minutes: 15

     steps:
-    - name: Checkout
-      uses: actions/checkout@v2
+      - name: Checkout
+        uses: actions/checkout@v2

-    - name: Set up Python 3.8
-      uses: actions/setup-python@v2
-      with:
-        python-version: 3.8
+      - name: Set up Python 3.8
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8

-    - name: Install dependencies
-      run: |
-        pip install --upgrade pip
-        pip install "."
-        pip install --pre --upgrade jupyterlab_server[test] jupyterlab[test] nbclassic[test]
-        pip freeze
-    - name: Run tests
-      working-directory: ../
-      run: |
-        # NOTE: tests won't pass from inside the working copy because of
-        # conftest.py:pytest_plugins (must be at the top level)
-        pytest --pyargs jupyterlab_server
-        python -m jupyterlab.browser_check --no-browser-test
+      - name: Install dependencies
+        run: |
+          pip install --upgrade pip
+          pip install "."
+          pip install --pre --upgrade jupyterlab_server[test] jupyterlab[test] nbclassic[test]
+          pip freeze
+      - name: Run tests
+        working-directory: ../
+        run: |
+          # NOTE: tests won't pass from inside the working copy because of
+          # conftest.py:pytest_plugins (must be at the top level)
+          pytest --pyargs jupyterlab_server
+          python -m jupyterlab.browser_check --no-browser-test

-        # Make sure we can start and kill the nbclassic server
-        jupyter nbclassic --no-browser &
-        TASK_PID=$!
-        # Make sure the task is running
-        ps -p $TASK_PID || exit 1
-        sleep 5
-        kill $TASK_PID
-        wait $TASK_PID
-    - name: Upload coverage to Codecov
-      uses: codecov/codecov-action@v1
+          # Make sure we can start and kill the nbclassic server
+          jupyter nbclassic --no-browser &
+          TASK_PID=$!
+          # Make sure the task is running
+          ps -p $TASK_PID || exit 1
+          sleep 5
+          kill $TASK_PID
+          wait $TASK_PID
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v1
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 3f1d410511..d800700983 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -1,9 +1,9 @@
 name: Jupyter Server Integration Tests [Linux]
 on:
   push:
-    branches: 'master'
+    branches: "master"
   pull_request:
-    branches: '*'
+    branches: "*"
 jobs:
   build:
     runs-on: ${{ matrix.os }}-latest
@@ -11,37 +11,37 @@
       fail-fast: false
       matrix:
         os: [ubuntu]
-        python-version: [ '3.6', '3.7', '3.8', '3.9', '3.10-dev', 'pypy3' ]
+        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10-dev", "pypy3"]
     steps:
-    - name: Checkout
-      uses: actions/checkout@v2
-    - name: Install Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
-      with:
-        python-version: ${{ matrix.python-version }}
-        architecture: 'x64'
-    - name: Upgrade packaging dependencies
-      run: |
-        pip install --upgrade pip setuptools wheel --user
-    - name: Get pip cache dir
-      id: pip-cache
-      run: |
-        echo "::set-output name=dir::$(pip cache dir)"
-    - name: Cache pip
-      uses: actions/cache@v2
-      with:
-        path: ${{ steps.pip-cache.outputs.dir }}
-        key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-${{ matrix.python-version }}-
-          ${{ runner.os }}-pip-
-    - name: Install the Python dependencies
-      run: |
-        pip install -e ".[test]"
-    - name: List installed packages
-      run: |
-        pip freeze
-        pip check
-    - name: Run the tests
-      run: |
-        pytest -vv --integration_tests=true
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Install Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+          architecture: "x64"
+      - name: Upgrade packaging dependencies
+        run: |
+          pip install --upgrade pip setuptools wheel --user
+      - name: Get pip cache dir
+        id: pip-cache
+        run: |
+          echo "::set-output name=dir::$(pip cache dir)"
+      - name: Cache pip
+        uses: actions/cache@v2
+        with:
+          path: ${{ steps.pip-cache.outputs.dir }}
+          key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          restore-keys: |
+            ${{ runner.os }}-pip-${{ matrix.python-version }}-
+            ${{ runner.os }}-pip-
+      - name: Install the Python dependencies
+        run: |
+          pip install -e ".[test]"
+      - name: List installed packages
+        run: |
+          pip freeze
+          pip check
+      - name: Run the tests
+        run: |
+          pytest -vv --integration_tests=true
diff --git a/.github/workflows/python-linux.yml b/.github/workflows/python-linux.yml
index 81bc8d7bff..7e6ae01c07 100644
--- a/.github/workflows/python-linux.yml
+++ b/.github/workflows/python-linux.yml
@@ -1,9 +1,9 @@
 name: Jupyter Server Tests [Linux]
 on:
   push:
-    branches: 'master'
+    branches: "master"
   pull_request:
-    branches: '*'
+    branches: "*"
 jobs:
   # Run "pre-commit run --all-files"
   pre-commit:
@@ -35,60 +35,60 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu]
-        python-version: [ '3.6', '3.7', '3.8', '3.9', '3.10-dev', 'pypy3' ]
+        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10-dev", "pypy3"]
     steps:
-    - name: Checkout
-      uses: actions/checkout@v2
-    - name: Install Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
-      with:
-        python-version: ${{ matrix.python-version }}
-        architecture: 'x64'
-    - name: Upgrade packaging dependencies
-      run: |
-        pip install --upgrade pip setuptools wheel --user
-    - name: Get pip cache dir
-      id: pip-cache
-      run: |
-        echo "::set-output name=dir::$(pip cache dir)"
-    - name: Cache pip
-      uses: actions/cache@v2
-      with:
-        path: ${{ steps.pip-cache.outputs.dir }}
-        key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-${{ matrix.python-version }}-
-          ${{ runner.os }}-pip-
-    - name: Install the Python dependencies
-      run: |
-        pip install -e ".[test]" codecov
-    - name: List installed packages
-      run: |
-        pip freeze
-        pip check
-    - name: Run the tests
-      if: ${{ matrix.python-version != 'pypy3' }}
-      run: |
-        pytest -vv --cov jupyter_server --cov-branch --cov-report term-missing:skip-covered
-    - name: Run the tests on pypy
-      if: ${{ matrix.python-version == 'pypy3' }}
-      run: |
-        pytest -vv
-    - name: Install the Python dependencies for the examples
-      run: |
-        cd examples/simple && pip install -e .
-    - name: Run the tests for the examples
-      run: |
-        pytest examples/simple/tests/test_handlers.py --confcutdir=$PWD
-    - name: Coverage
-      if: ${{ matrix.python-version != 'pypy3' }}
-      run: |
-        codecov
-    - name: Test full install
-      run: |
-        python -m venv test_install
-        ./test_install/bin/python -m pip install -U pip
-        ./test_install/bin/python -m pip install ".[test]"
-        pushd test_install
-        ./bin/pytest --pyargs jupyter_server
-        popd
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Install Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+          architecture: "x64"
+      - name: Upgrade packaging dependencies
+        run: |
+          pip install --upgrade pip setuptools wheel --user
+      - name: Get pip cache dir
+        id: pip-cache
+        run: |
+          echo "::set-output name=dir::$(pip cache dir)"
+      - name: Cache pip
+        uses: actions/cache@v2
+        with:
+          path: ${{ steps.pip-cache.outputs.dir }}
+          key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          restore-keys: |
+            ${{ runner.os }}-pip-${{ matrix.python-version }}-
+            ${{ runner.os }}-pip-
+      - name: Install the Python dependencies
+        run: |
+          pip install -e ".[test]" codecov
+      - name: List installed packages
+        run: |
+          pip freeze
+          pip check
+      - name: Run the tests
+        if: ${{ matrix.python-version != 'pypy3' }}
+        run: |
+          pytest -vv --cov jupyter_server --cov-branch --cov-report term-missing:skip-covered
+      - name: Run the tests on pypy
+        if: ${{ matrix.python-version == 'pypy3' }}
+        run: |
+          pytest -vv
+      - name: Install the Python dependencies for the examples
+        run: |
+          cd examples/simple && pip install -e .
+      - name: Run the tests for the examples
+        run: |
+          pytest examples/simple/tests/test_handlers.py --confcutdir=$PWD
+      - name: Coverage
+        if: ${{ matrix.python-version != 'pypy3' }}
+        run: |
+          codecov
+      - name: Test full install
+        run: |
+          python -m venv test_install
+          ./test_install/bin/python -m pip install -U pip
+          ./test_install/bin/python -m pip install ".[test]"
+          pushd test_install
+          ./bin/pytest --pyargs jupyter_server
+          popd
diff --git a/.github/workflows/python-macos.yml b/.github/workflows/python-macos.yml
index b5b5e07e7b..81e0761256 100644
--- a/.github/workflows/python-macos.yml
+++ b/.github/workflows/python-macos.yml
@@ -1,9 +1,9 @@
 name: Jupyter Server Tests [Mac OS]
 on:
   push:
-    branches: 'master'
+    branches: "master"
   pull_request:
-    branches: '*'
+    branches: "*"
 jobs:
   build:
     runs-on: ${{ matrix.os }}-latest
@@ -11,52 +11,52 @@
       fail-fast: false
       matrix:
         os: [macos]
-        python-version: [ '3.6', '3.7', '3.8', '3.9', '3.10-dev', 'pypy3' ]
+        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10-dev", "pypy3"]
     steps:
-    - name: Checkout
-      uses: actions/checkout@v2
-    - name: Install Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
-      with:
-        python-version: ${{ matrix.python-version }}
-        architecture: 'x64'
-    - name: Upgrade packaging dependencies
-      run: |
-        pip install --upgrade pip setuptools wheel --user
-    - name: Get pip cache dir
-      id: pip-cache
-      run: |
-        echo "::set-output name=dir::$(pip cache dir)"
-    - name: Cache pip
-      uses: actions/cache@v2
-      with:
-        path: ${{ steps.pip-cache.outputs.dir }}
-        key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-${{ matrix.python-version }}-
-          ${{ runner.os }}-pip-
-    - name: Install the Python dependencies
-      run: |
-        pip install -e .[test] codecov
-    - name: List installed packages
-      run: |
-        pip freeze
-        pip check
-    - name: Run the tests
-      if: ${{ matrix.python-version != 'pypy3' }}
-      run: |
-        pytest -vv --cov jupyter_server --cov-branch --cov-report term-missing:skip-covered
-    - name: Run the tests on pypy
-      if: ${{ matrix.python-version == 'pypy3' }}
-      run: |
-        pytest -vv
-    - name: Install the Python dependencies for the examples
-      run: |
-        cd examples/simple && pip install -e .
-    - name: Run the tests for the examples
-      run: |
-        pytest examples/simple/tests/test_handlers.py --confcutdir=$PWD
-    - name: Coverage
-      if: ${{ matrix.python-version != 'pypy3' }}
-      run: |
-        codecov
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Install Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+          architecture: "x64"
+      - name: Upgrade packaging dependencies
+        run: |
+          pip install --upgrade pip setuptools wheel --user
+      - name: Get pip cache dir
+        id: pip-cache
+        run: |
+          echo "::set-output name=dir::$(pip cache dir)"
+      - name: Cache pip
+        uses: actions/cache@v2
+        with:
+          path: ${{ steps.pip-cache.outputs.dir }}
+          key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          restore-keys: |
+            ${{ runner.os }}-pip-${{ matrix.python-version }}-
+            ${{ runner.os }}-pip-
+      - name: Install the Python dependencies
+        run: |
+          pip install -e .[test] codecov
+      - name: List installed packages
+        run: |
+          pip freeze
+          pip check
+      - name: Run the tests
+        if: ${{ matrix.python-version != 'pypy3' }}
+        run: |
+          pytest -vv --cov jupyter_server --cov-branch --cov-report term-missing:skip-covered
+      - name: Run the tests on pypy
+        if: ${{ matrix.python-version == 'pypy3' }}
+        run: |
+          pytest -vv
+      - name: Install the Python dependencies for the examples
+        run: |
+          cd examples/simple && pip install -e .
+      - name: Run the tests for the examples
+        run: |
+          pytest examples/simple/tests/test_handlers.py --confcutdir=$PWD
+      - name: Coverage
+        if: ${{ matrix.python-version != 'pypy3' }}
+        run: |
+          codecov
diff --git a/.github/workflows/python-windows.yml b/.github/workflows/python-windows.yml
index 0a8d68f231..535e5735cf 100644
--- a/.github/workflows/python-windows.yml
+++ b/.github/workflows/python-windows.yml
@@ -1,9 +1,9 @@
 name: Jupyter Server Tests [Windows]
 on:
   push:
-    branches: 'master'
+    branches: "master"
   pull_request:
-    branches: '*'
+    branches: "*"
 jobs:
   build:
     runs-on: ${{ matrix.os }}-latest
@@ -11,48 +11,48 @@
       fail-fast: false
       matrix:
         os: [windows]
-        python-version: [ '3.6', '3.7', '3.8', '3.9' ]
+        python-version: ["3.6", "3.7", "3.8", "3.9"]
     steps:
-    - name: Checkout
-      uses: actions/checkout@v2
-    - name: Install Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
-      with:
-        python-version: ${{ matrix.python-version }}
-        architecture: 'x64'
-    - name: Upgrade packaging dependencies
-      run: |
-        pip install --upgrade pip setuptools wheel --user
-    - name: Get pip cache dir
-      id: pip-cache
-      run: |
-        echo "::set-output name=dir::$(pip cache dir)"
-    - name: Cache pip
-      uses: actions/cache@v2
-      with:
-        path: ${{ steps.pip-cache.outputs.dir }}
-        key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-${{ matrix.python-version }}-
-          ${{ runner.os }}-pip-
-    - name: Install the Python dependencies
-      run: |
-        pip install -e .[test]
-    - name: List installed packages
-      run: |
-        pip freeze
-        pip check
-    - name: Run the tests
-      run: |
-        # Disable capturing (-s) output from Pytest on Windows.
-        # For an unknown reason, capturing output interferes with
-        # the file descriptions opened by the asyncio IOLoop.
-        # This leads to a nasty, flaky race condition that we haven't
-        # been able to solve.
-        pytest -vv -s
-    - name: Install the Python dependencies for the examples
-      run: |
-        cd examples/simple && pip install -e .
-    - name: Run the tests for the examples
-      run: |
-        pytest examples/simple/tests/test_handlers.py --confcutdir=$PWD
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Install Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+          architecture: "x64"
+      - name: Upgrade packaging dependencies
+        run: |
+          pip install --upgrade pip setuptools wheel --user
+      - name: Get pip cache dir
+        id: pip-cache
+        run: |
+          echo "::set-output name=dir::$(pip cache dir)"
+      - name: Cache pip
+        uses: actions/cache@v2
+        with:
+          path: ${{ steps.pip-cache.outputs.dir }}
+          key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          restore-keys: |
+            ${{ runner.os }}-pip-${{ matrix.python-version }}-
+            ${{ runner.os }}-pip-
+      - name: Install the Python dependencies
+        run: |
+          pip install -e .[test]
+      - name: List installed packages
+        run: |
+          pip freeze
+          pip check
+      - name: Run the tests
+        run: |
+          # Disable capturing (-s) output from Pytest on Windows.
+          # For an unknown reason, capturing output interferes with
+          # the file descriptions opened by the asyncio IOLoop.
+          # This leads to a nasty, flaky race condition that we haven't
+          # been able to solve.
+          pytest -vv -s
+      - name: Install the Python dependencies for the examples
+        run: |
+          cd examples/simple && pip install -e .
+      - name: Run the tests for the examples
+        run: |
+          pytest examples/simple/tests/test_handlers.py --confcutdir=$PWD
diff --git a/.gitignore b/.gitignore
index 5ff7515547..8d26500c3b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -44,4 +44,3 @@ docs/source/other/changelog.md
 .history
 .vscode/*
 !.vscode/*.template
-
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5c0b8d9046..694598d8ce 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -270,8 +270,8 @@ All notable changes to this project will be documented in this file.

 **Merged pull requests:**

-* Ensure jupyter config dir exists [#454](https://github.com/jupyter-server/jupyter_server/pull/454) ([@afshin](https://github.com/afshin))
-* Allow `pre_save_hook` to cancel save with `HTTPError` [#456](https://github.com/jupyter-server/jupyter_server/pull/456) ([@minrk](https://github.com/minrk))
+- Ensure jupyter config dir exists [#454](https://github.com/jupyter-server/jupyter_server/pull/454) ([@afshin](https://github.com/afshin))
+- Allow `pre_save_hook` to cancel save with `HTTPError` [#456](https://github.com/jupyter-server/jupyter_server/pull/456) ([@minrk](https://github.com/minrk))

 **Contributors to this release:**
@@ -310,10 +310,10 @@ All notable changes to this project will be documented in this file.
 **Merged pull requests:**

-* Update README.md [#425](https://github.com/jupyter-server/jupyter_server/pull/425) ([@BobinMathew](https://github.com/BobinMathew))
-* Solve UnboundLocalError in launch_browser() [#421](https://github.com/jupyter-server/jupyter_server/pull/421) ([@jamesmishra](https://github.com/jamesmishra))
-* Add file_to_run to server extension docs [#420](https://github.com/jupyter-server/jupyter_server/pull/420) ([@Zsailer](https://github.com/Zsailer))
-* Remove outdated reference to _jupyter_server_extension_paths in docs [#419](https://github.com/jupyter-server/jupyter_server/pull/419) ([@Zsailer](https://github.com/Zsailer))
+- Update README.md [#425](https://github.com/jupyter-server/jupyter_server/pull/425) ([@BobinMathew](https://github.com/BobinMathew))
+- Solve UnboundLocalError in launch_browser() [#421](https://github.com/jupyter-server/jupyter_server/pull/421) ([@jamesmishra](https://github.com/jamesmishra))
+- Add file_to_run to server extension docs [#420](https://github.com/jupyter-server/jupyter_server/pull/420) ([@Zsailer](https://github.com/Zsailer))
+- Remove outdated reference to \_jupyter_server_extension_paths in docs [#419](https://github.com/jupyter-server/jupyter_server/pull/419) ([@Zsailer](https://github.com/Zsailer))

 **Contributors to this release:**
@@ -328,7 +328,7 @@ All notable changes to this project will be documented in this file.
 **Merged pull requests:**

 - Add Tests to Distribution [\#416](https://github.com/jupyter-server/jupyter_server/pull/416) ([afshin](https://github.com/afshin))
-- Enable extensions to control the file\_to\_run [\#415](https://github.com/jupyter-server/jupyter_server/pull/415) ([afshin](https://github.com/afshin))
+- Enable extensions to control the file_to_run [\#415](https://github.com/jupyter-server/jupyter_server/pull/415) ([afshin](https://github.com/afshin))
 - add missing template for view.html [\#414](https://github.com/jupyter-server/jupyter_server/pull/414) ([minrk](https://github.com/minrk))
 - Remove obsoleted asyncio-patch fixture [\#412](https://github.com/jupyter-server/jupyter_server/pull/412) ([kevin-bates](https://github.com/kevin-bates))
 - Emit deprecation warning on old name [\#411](https://github.com/jupyter-server/jupyter_server/pull/411) ([fcollonval](https://github.com/fcollonval))
@@ -344,12 +344,12 @@
 **Merged pull requests (includes those from broken 1.2.3 release):**

 - Special case ExtensionApp that starts the ServerApp [\#401](https://github.com/jupyter-server/jupyter_server/pull/401) ([afshin](https://github.com/afshin))
-- only use deprecated notebook\_dir config if root\_dir is not set [\#400](https://github.com/jupyter-server/jupyter_server/pull/400) ([minrk](https://github.com/minrk))
+- only use deprecated notebook_dir config if root_dir is not set [\#400](https://github.com/jupyter-server/jupyter_server/pull/400) ([minrk](https://github.com/minrk))
 - Use async kernel manager by default [\#399](https://github.com/jupyter-server/jupyter_server/pull/399) ([kevin-bates](https://github.com/kevin-bates))
 - Revert Session.username default value change [\#398](https://github.com/jupyter-server/jupyter_server/pull/398) ([mwakaba2](https://github.com/mwakaba2))
-- Re-enable default\_url in ExtensionApp [\#393](https://github.com/jupyter-server/jupyter_server/pull/393) ([afshin](https://github.com/afshin))
-- Enable notebook ContentsManager in jupyter\_server [\#392](https://github.com/jupyter-server/jupyter_server/pull/392) ([afshin](https://github.com/afshin))
-- Use jupyter\_server\_config.json as config file in the update password api [\#390](https://github.com/jupyter-server/jupyter_server/pull/390) ([echarles](https://github.com/echarles))
+- Re-enable default_url in ExtensionApp [\#393](https://github.com/jupyter-server/jupyter_server/pull/393) ([afshin](https://github.com/afshin))
+- Enable notebook ContentsManager in jupyter_server [\#392](https://github.com/jupyter-server/jupyter_server/pull/392) ([afshin](https://github.com/afshin))
+- Use jupyter_server_config.json as config file in the update password api [\#390](https://github.com/jupyter-server/jupyter_server/pull/390) ([echarles](https://github.com/echarles))
 - Increase culling test idle timeout [\#388](https://github.com/jupyter-server/jupyter_server/pull/388) ([kevin-bates](https://github.com/kevin-bates))
 - update changelog for 1.2.2 [\#387](https://github.com/jupyter-server/jupyter_server/pull/387) ([Zsailer](https://github.com/Zsailer))
@@ -361,9 +361,9 @@ This was a broken release and was yanked from PyPI.
 **Merged pull requests:**

-- Re-enable default\_url in ExtensionApp [\#393](https://github.com/jupyter-server/jupyter_server/pull/393) ([afshin](https://github.com/afshin))
-- Enable notebook ContentsManager in jupyter\_server [\#392](https://github.com/jupyter-server/jupyter_server/pull/392) ([afshin](https://github.com/afshin))
-- Use jupyter\_server\_config.json as config file in the update password api [\#390](https://github.com/jupyter-server/jupyter_server/pull/390) ([echarles](https://github.com/echarles))
+- Re-enable default_url in ExtensionApp [\#393](https://github.com/jupyter-server/jupyter_server/pull/393) ([afshin](https://github.com/afshin))
+- Enable notebook ContentsManager in jupyter_server [\#392](https://github.com/jupyter-server/jupyter_server/pull/392) ([afshin](https://github.com/afshin))
+- Use jupyter_server_config.json as config file in the update password api [\#390](https://github.com/jupyter-server/jupyter_server/pull/390) ([echarles](https://github.com/echarles))
 - Increase culling test idle timeout [\#388](https://github.com/jupyter-server/jupyter_server/pull/388) ([kevin-bates](https://github.com/kevin-bates))
 - update changelog for 1.2.2 [\#387](https://github.com/jupyter-server/jupyter_server/pull/387) ([Zsailer](https://github.com/Zsailer))
@@ -371,10 +371,10 @@

 **Merged pull requests:**

-- Apply missing ensure\_async to root session handler methods [\#386](https://github.com/jupyter-server/jupyter_server/pull/386) ([kevin-bates](https://github.com/kevin-bates))
+- Apply missing ensure_async to root session handler methods [\#386](https://github.com/jupyter-server/jupyter_server/pull/386) ([kevin-bates](https://github.com/kevin-bates))
 - Update changelog to 1.2.1 [\#385](https://github.com/jupyter-server/jupyter_server/pull/385) ([Zsailer](https://github.com/Zsailer))
 - Fix application exit [\#384](https://github.com/jupyter-server/jupyter_server/pull/384) ([afshin](https://github.com/afshin))
-- Replace secure\_write, is\_hidden, exists with jupyter\_core's [\#382](https://github.com/jupyter-server/jupyter_server/pull/382) ([kevin-bates](https://github.com/kevin-bates))
+- Replace secure_write, is_hidden, exists with jupyter_core's [\#382](https://github.com/jupyter-server/jupyter_server/pull/382) ([kevin-bates](https://github.com/kevin-bates))
 - Add --autoreload flag [\#380](https://github.com/jupyter-server/jupyter_server/pull/380) ([afshin](https://github.com/afshin))

 ## [1.2.1](https://github.com/jupyter-server/jupyter_server/tree/1.2.1) (2021-01-08)
@@ -392,11 +392,11 @@ This was a broken release and was yanked from PyPI.
 **Merged pull requests:**

-- Flip default value for open\_browser in extensions [\#377](https://github.com/jupyter-server/jupyter_server/pull/377) ([ajbozarth](https://github.com/ajbozarth))
+- Flip default value for open_browser in extensions [\#377](https://github.com/jupyter-server/jupyter_server/pull/377) ([ajbozarth](https://github.com/ajbozarth))
 - Improve Handling of the soft limit on open file handles [\#376](https://github.com/jupyter-server/jupyter_server/pull/376) ([afshin](https://github.com/afshin))
-- Handle open\_browser trait in ServerApp and ExtensionApp differently [\#375](https://github.com/jupyter-server/jupyter_server/pull/375) ([afshin](https://github.com/afshin))
+- Handle open_browser trait in ServerApp and ExtensionApp differently [\#375](https://github.com/jupyter-server/jupyter_server/pull/375) ([afshin](https://github.com/afshin))
 - Add setting to disable redirect file browser launch [\#374](https://github.com/jupyter-server/jupyter_server/pull/374) ([afshin](https://github.com/afshin))
-- Make trust handle use ensure\_async [\#373](https://github.com/jupyter-server/jupyter_server/pull/373) ([vidartf](https://github.com/vidartf))
+- Make trust handle use ensure_async [\#373](https://github.com/jupyter-server/jupyter_server/pull/373) ([vidartf](https://github.com/vidartf))

 ## [1.1.4](https://github.com/jupyter-server/jupyter_server/tree/1.1.4) (2021-01-04)
@@ -406,7 +406,7 @@ This was a broken release and was yanked from PyPI.

 - Update the link to paths documentation [\#371](https://github.com/jupyter-server/jupyter_server/pull/371) ([krassowski](https://github.com/krassowski))
 - IPythonHandler -\> JupyterHandler [\#370](https://github.com/jupyter-server/jupyter_server/pull/370) ([krassowski](https://github.com/krassowski))
-- use setuptools find\_packages, exclude tests, docs and examples from dist [\#368](https://github.com/jupyter-server/jupyter_server/pull/368) ([bollwyvl](https://github.com/bollwyvl))
+- use setuptools find_packages, exclude tests, docs and examples from dist [\#368](https://github.com/jupyter-server/jupyter_server/pull/368) ([bollwyvl](https://github.com/bollwyvl))
 - Update serverapp.py [\#367](https://github.com/jupyter-server/jupyter_server/pull/367) ([michaelaye](https://github.com/michaelaye))

 ## [1.1.3](https://github.com/jupyter-server/jupyter_server/tree/1.1.3) (2020-12-23)
@@ -415,7 +415,7 @@

 **Merged pull requests:**

-- Culling: ensure last\_activity attr exists before use [\#365](https://github.com/jupyter-server/jupyter_server/pull/365) ([afshin](https://github.com/afshin))
+- Culling: ensure last_activity attr exists before use [\#365](https://github.com/jupyter-server/jupyter_server/pull/365) ([afshin](https://github.com/afshin))

 ## [1.1.2](https://github.com/jupyter-server/jupyter_server/tree/1.1.2) (2020-12-21)
@@ -431,7 +431,7 @@

 **Merged pull requests:**

-- Fix: await possible async dir\_exists method [\#363](https://github.com/jupyter-server/jupyter_server/pull/363) ([mwakaba2](https://github.com/mwakaba2))
+- Fix: await possible async dir_exists method [\#363](https://github.com/jupyter-server/jupyter_server/pull/363) ([mwakaba2](https://github.com/mwakaba2))

 ## 1.1.0 (2020-12-11)
@@ -441,14 +441,14 @@ This was a broken release and was yanked from PyPI.
 - Restore pytest plugin from pytest-jupyter [\#360](https://github.com/jupyter-server/jupyter_server/pull/360) ([kevin-bates](https://github.com/kevin-bates))
 - Fix upgrade packaging dependencies build step [\#354](https://github.com/jupyter-server/jupyter_server/pull/354) ([mwakaba2](https://github.com/mwakaba2))
-- Await \_connect and inline read\_messages callback to \_connect [\#350](https://github.com/jupyter-server/jupyter_server/pull/350) ([ricklamers](https://github.com/ricklamers))
+- Await \_connect and inline read_messages callback to \_connect [\#350](https://github.com/jupyter-server/jupyter_server/pull/350) ([ricklamers](https://github.com/ricklamers))
 - Update release instructions and dev version [\#348](https://github.com/jupyter-server/jupyter_server/pull/348) ([kevin-bates](https://github.com/kevin-bates))
-- Fix test\_trailing\_slash [\#346](https://github.com/jupyter-server/jupyter_server/pull/346) ([kevin-bates](https://github.com/kevin-bates))
+- Fix test_trailing_slash [\#346](https://github.com/jupyter-server/jupyter_server/pull/346) ([kevin-bates](https://github.com/kevin-bates))
 - Apply security advisory fix to master [\#345](https://github.com/jupyter-server/jupyter_server/pull/345) ([kevin-bates](https://github.com/kevin-bates))
 - Allow toggling auth for prometheus metrics [\#344](https://github.com/jupyter-server/jupyter_server/pull/344) ([yuvipanda](https://github.com/yuvipanda))
 - Port Notebook PRs 5565 and 5588 - terminal shell heuristics [\#343](https://github.com/jupyter-server/jupyter_server/pull/343) ([kevin-bates](https://github.com/kevin-bates))
 - Port gateway updates from notebook \(PRs 5317 and 5484\) [\#341](https://github.com/jupyter-server/jupyter_server/pull/341) ([kevin-bates](https://github.com/kevin-bates))
-- add check\_origin handler to gateway WebSocketChannelsHandler [\#340](https://github.com/jupyter-server/jupyter_server/pull/340) ([ricklamers](https://github.com/ricklamers))
+- add check_origin handler to gateway WebSocketChannelsHandler [\#340](https://github.com/jupyter-server/jupyter_server/pull/340) ([ricklamers](https://github.com/ricklamers))
 - Remove pytest11 entrypoint and plugin, require tornado 6.1, remove asyncio patch, CI work [\#339](https://github.com/jupyter-server/jupyter_server/pull/339) ([bollwyvl](https://github.com/bollwyvl))
 - Switch fixtures to use those in pytest-jupyter to avoid collisions [\#335](https://github.com/jupyter-server/jupyter_server/pull/335) ([kevin-bates](https://github.com/kevin-bates))
 - Enable CodeQL runs on all pushed branches [\#333](https://github.com/jupyter-server/jupyter_server/pull/333) ([kevin-bates](https://github.com/kevin-bates))
@@ -466,32 +466,33 @@ This was a broken release and was yanked from PyPI.

 ### Added.

-* Added a basic, styled `login.html` template. ([220](https://github.com/jupyter/jupyter_server/pull/220), [295](https://github.com/jupyter/jupyter_server/pull/295))
-* Added new extension manager API for handling server extensions. ([248](https://github.com/jupyter/jupyter_server/pull/248), [265](https://github.com/jupyter/jupyter_server/pull/265), [275](https://github.com/jupyter/jupyter_server/pull/275), [303](https://github.com/jupyter/jupyter_server/pull/303))
-* The favicon and Jupyter logo are now available under jupyter_server's static namespace. ([284](https://github.com/jupyter/jupyter_server/pull/284))
+- Added a basic, styled `login.html` template. ([220](https://github.com/jupyter/jupyter_server/pull/220), [295](https://github.com/jupyter/jupyter_server/pull/295))
+- Added new extension manager API for handling server extensions. ([248](https://github.com/jupyter/jupyter_server/pull/248), [265](https://github.com/jupyter/jupyter_server/pull/265), [275](https://github.com/jupyter/jupyter_server/pull/275), [303](https://github.com/jupyter/jupyter_server/pull/303))
+- The favicon and Jupyter logo are now available under jupyter_server's static namespace. ([284](https://github.com/jupyter/jupyter_server/pull/284))

 ### Changed.

-* `load_jupyter_server_extension` should be renamed to `_load_jupyter_server_extension` in server extensions. Server now throws a warning when the old name is used. ([213](https://github.com/jupyter/jupyter_server/pull/213))
-* Docs for server extensions now recommend using `authenticated` decorator for handlers. ([219](https://github.com/jupyter/jupyter_server/pull/219))
-* `_load_jupyter_server_paths` should be renamed to `_load_jupyter_server_points` in server extensions. ([277](https://github.com/jupyter/jupyter_server/pull/277))
-* `static_url_prefix` in ExtensionApps is now a configurable trait. ([289](https://github.com/jupyter/jupyter_server/pull/289))
-* `extension_name` trait was removed in favor of `name`. ([232](https://github.com/jupyter/jupyter_server/pull/232))
-* Dropped support for Python 3.5. ([296](https://github.com/jupyter/jupyter_server/pull/296))
-* Made the `config_dir_name` trait configurable in `ConfigManager`. ([297](https://github.com/jupyter/jupyter_server/pull/297))
+- `load_jupyter_server_extension` should be renamed to `_load_jupyter_server_extension` in server extensions. Server now throws a warning when the old name is used. ([213](https://github.com/jupyter/jupyter_server/pull/213))
+- Docs for server extensions now recommend using `authenticated` decorator for handlers. ([219](https://github.com/jupyter/jupyter_server/pull/219))
+- `_load_jupyter_server_paths` should be renamed to `_load_jupyter_server_points` in server extensions. ([277](https://github.com/jupyter/jupyter_server/pull/277))
+- `static_url_prefix` in ExtensionApps is now a configurable trait. ([289](https://github.com/jupyter/jupyter_server/pull/289))
+- `extension_name` trait was removed in favor of `name`. ([232](https://github.com/jupyter/jupyter_server/pull/232))
+- Dropped support for Python 3.5. ([296](https://github.com/jupyter/jupyter_server/pull/296))
+- Made the `config_dir_name` trait configurable in `ConfigManager`. ([297](https://github.com/jupyter/jupyter_server/pull/297))

 ### Removed for now removed features.

-* Removed ipykernel as a dependency of jupyter_server. ([255](https://github.com/jupyter/jupyter_server/pull/255))
+- Removed ipykernel as a dependency of jupyter_server. ([255](https://github.com/jupyter/jupyter_server/pull/255))

 ### Fixed for any bug fixes.
-* Prevent a re-definition of prometheus metrics if `notebook` package already imports them. ([#210](https://github.com/jupyter/jupyter_server/pull/210))
-* Fixed `terminals` REST API unit tests that weren't shutting down properly. ([221](https://github.com/jupyter/jupyter_server/pull/221))
-* Fixed jupyter_server on Windows for Python < 3.7. Added patch to handle subprocess cleanup. ([240](https://github.com/jupyter/jupyter_server/pull/240))
-* `base_url` was being duplicated when getting a url path from the `ServerApp`. ([280](https://github.com/jupyter/jupyter_server/pull/280))
-* Extension URLs are now properly prefixed with `base_url`. Previously, all `static` paths were not. ([285](https://github.com/jupyter/jupyter_server/pull/285))
-* Changed ExtensionApp mixin to inherit from `HasTraits`. This broke in traitlets 5.0 ([294](https://github.com/jupyter/jupyter_server/pull/294))
-* Replaces `urlparse` with `url_path_join` to prevent URL squashing issues. ([304](https://github.com/jupyter/jupyter_server/pull/304))
+
+- Prevent a re-definition of prometheus metrics if `notebook` package already imports them. ([#210](https://github.com/jupyter/jupyter_server/pull/210))
+- Fixed `terminals` REST API unit tests that weren't shutting down properly. ([221](https://github.com/jupyter/jupyter_server/pull/221))
+- Fixed jupyter_server on Windows for Python < 3.7. Added patch to handle subprocess cleanup. ([240](https://github.com/jupyter/jupyter_server/pull/240))
+- `base_url` was being duplicated when getting a url path from the `ServerApp`. ([280](https://github.com/jupyter/jupyter_server/pull/280))
+- Extension URLs are now properly prefixed with `base_url`. Previously, all `static` paths were not. ([285](https://github.com/jupyter/jupyter_server/pull/285))
+- Changed ExtensionApp mixin to inherit from `HasTraits`. This broke in traitlets 5.0 ([294](https://github.com/jupyter/jupyter_server/pull/294))
+- Replaces `urlparse` with `url_path_join` to prevent URL squashing issues. ([304](https://github.com/jupyter/jupyter_server/pull/304))

 ## [0.3] - 2020-4-22
@@ -514,48 +515,53 @@ This was a broken release and was yanked from PyPI.

 ### Added

 - **pytest-plugin** for Jupyter Server.
-    - Allows one to write async/await syntax in tests functions.
-    - Some particularly useful fixtures include:
-        - `serverapp`: a default ServerApp instance that handles setup+teardown.
-        - `configurable_serverapp`: a function that returns a ServerApp instance.
-        - `fetch`: an awaitable function that tests makes requests to the server API
-        - `create_notebook`: a function that writes a notebook to a given temporary file path.
+  - Allows one to write async/await syntax in tests functions.
+  - Some particularly useful fixtures include:
+    - `serverapp`: a default ServerApp instance that handles setup+teardown.
+    - `configurable_serverapp`: a function that returns a ServerApp instance.
+    - `fetch`: an awaitable function that tests makes requests to the server API
+    - `create_notebook`: a function that writes a notebook to a given temporary file path.

 ## [0.2.0] - 2019-12-19

 ### Added
+
 - `extension` submodule ([#48](https://github.com/jupyter/jupyter_server/pull/48))
-    - ExtensionApp - configurable JupyterApp-subclass for server extensions
-        - Most useful for Jupyter frontends, like Notebook, JupyterLab, nteract, voila etc.
-        - Launch with entrypoints
-        - Configure from file or CLI
-        - Add custom templates, static assets, handlers, etc.
-        - Static assets are served behind a `/static/` endpoint.
-    - Run server extensions in "standalone mode" ([#70](https://github.com/jupyter/jupyter_server/pull/70) and [#76](https://github.com/jupyter/jupyter_server/pull/76))
-    - ExtensionHandler - tornado handlers for extensions.
-        - Finds static assets at `/static/`
+  - ExtensionApp - configurable JupyterApp-subclass for server extensions
+    - Most useful for Jupyter frontends, like Notebook, JupyterLab, nteract, voila etc.
+    - Launch with entrypoints
+    - Configure from file or CLI
+    - Add custom templates, static assets, handlers, etc.
+    - Static assets are served behind a `/static/` endpoint.
+  - Run server extensions in "standalone mode" ([#70](https://github.com/jupyter/jupyter_server/pull/70) and [#76](https://github.com/jupyter/jupyter_server/pull/76))
+  - ExtensionHandler - tornado handlers for extensions.
+    - Finds static assets at `/static/`

 ### Changed
+
 - `jupyter serverextension ` entrypoint has been changed to `jupyter server extension `.
 - `toggle_jupyter_server` and `validate_jupyter_server` function no longer take a Logger object as an argument.
 - Changed testing framework from nosetests to pytest ([#152](https://github.com/jupyter/jupyter_server/pull/152))
-    - Depend on pytest-tornasync extension for handling tornado/asyncio eventloop
-    - Depend on pytest-console-scripts for testing CLI entrypoints
+  - Depend on pytest-tornasync extension for handling tornado/asyncio eventloop
+  - Depend on pytest-console-scripts for testing CLI entrypoints
 - Added Github actions as a testing framework along side Travis and Azure ([#146](https://github.com/jupyter/jupyter_server/pull/146))

 ### Removed
+
 - Removed the option to update `root_dir` trait in FileContentsManager and MappingKernelManager in ServerApp ([#135](https://github.com/jupyter/jupyter_server/pull/135))

 ### Fixed
+
 - Synced Jupyter Server with Notebook PRs in batches (ended on 2019-09-27)
-    - [Batch 1](https://github.com/jupyter/jupyter_server/pull/95)
-    - [Batch 2](https://github.com/jupyter/jupyter_server/pull/97)
-    - [Batch 3](https://github.com/jupyter/jupyter_server/pull/98)
-    - [Batch 4](https://github.com/jupyter/jupyter_server/pull/99)
-    - [Batch 5](https://github.com/jupyter/jupyter_server/pull/103)
-    - [Batch 6](https://github.com/jupyter/jupyter_server/pull/104)
-    - [Batch 7](https://github.com/jupyter/jupyter_server/pull/105)
-    - [Batch 8](https://github.com/jupyter/jupyter_server/pull/106)
+  - [Batch 1](https://github.com/jupyter/jupyter_server/pull/95)
+  - [Batch 2](https://github.com/jupyter/jupyter_server/pull/97)
+  - [Batch 3](https://github.com/jupyter/jupyter_server/pull/98)
+  - [Batch 4](https://github.com/jupyter/jupyter_server/pull/99)
+  - [Batch 5](https://github.com/jupyter/jupyter_server/pull/103)
+  - [Batch 6](https://github.com/jupyter/jupyter_server/pull/104)
+  - [Batch 7](https://github.com/jupyter/jupyter_server/pull/105)
+  - [Batch 8](https://github.com/jupyter/jupyter_server/pull/106)

 ### Security
+
 - Added a "secure_write to function for cookie/token saves ([#77](https://github.com/jupyter/jupyter_server/pull/77))
diff --git a/COPYING.md b/COPYING.md
index bd6397d458..7cfb970db8 100644
--- a/COPYING.md
+++ b/COPYING.md
@@ -25,7 +25,7 @@ software without specific prior written permission.
 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE 
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
@@ -47,8 +47,8 @@ Jupyter uses a shared copyright model. Each contributor maintains copyright
 over their contributions to Jupyter. But, it is important to note that these
 contributions are typically only changes to the repositories. Thus, the Jupyter
 source code, in its entirety is not the copyright of any single person or
-institution. Instead, it is the collective copyright of the entire Jupyter 
-Development Team. If individual contributors want to maintain a record of what 
+institution. Instead, it is the collective copyright of the entire Jupyter
+Development Team. If individual contributors want to maintain a record of what
 changes/contributions they have specific copyright on, they should indicate
 their copyright in the commit message of the change, when they commit the
 change to one of the Jupyter repositories.
diff --git a/README.md b/README.md
index e3ab6f4fd6..2f03b94bf2 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,6 @@ When a new minor version is released on PyPI, a branch for that version will be

 To see the changes between releases, checkout the [CHANGELOG](https://github.com/jupyter/jupyter_server/blob/master/CHANGELOG.md).

-
 ## Usage - Running Jupyter Server

 ### Running in a local installation
@@ -43,8 +42,8 @@ If you are interested in contributing to the project, see [`CONTRIBUTING.rst`](C

 ## Team Meetings and Roadmap

-* When: Thursdays [8:00am, Pacific time](https://www.thetimezoneconverter.com/?t=8%3A00%20am&tz=San%20Francisco&)
-* Where: [Jovyan Zoom](https://zoom.us/my/jovyan?pwd=c0JZTHlNdS9Sek9vdzR3aTJ4SzFTQT09)
-* What: [Meeting notes](https://github.com/jupyter-server/team-compass/issues/4)
+- When: Thursdays [8:00am, Pacific time](https://www.thetimezoneconverter.com/?t=8%3A00%20am&tz=San%20Francisco&)
+- Where: [Jovyan Zoom](https://zoom.us/my/jovyan?pwd=c0JZTHlNdS9Sek9vdzR3aTJ4SzFTQT09)
+- What: [Meeting notes](https://github.com/jupyter-server/team-compass/issues/4)

 See our tentative [roadmap here](https://github.com/jupyter/jupyter_server/issues/127).
diff --git a/conftest.py b/conftest.py
index c461afc623..e1a9fe0dd0 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,9 +1,7 @@
 import pytest

-pytest_plugins = [
-    "jupyter_server.pytest_plugin"
-]
+pytest_plugins = ["jupyter_server.pytest_plugin"]


 def pytest_addoption(parser):
@@ -17,9 +15,7 @@ def pytest_addoption(parser):

 def pytest_configure(config):
     # register an additional marker
-    config.addinivalue_line(
-        "markers", "integration_test"
-    )
+    config.addinivalue_line("markers", "integration_test")


 def pytest_runtest_setup(item):
@@ -30,4 +26,6 @@ def pytest_runtest_setup(item):
         pytest.skip("Only running tests marked as 'integration_test'.")
     else:
         if is_integration_test:
-            pytest.skip("Skipping this test because it's marked 'integration_test'. Run integration tests using the `--integration_tests` flag.")
+            pytest.skip(
+                "Skipping this test because it's marked 'integration_test'. Run integration tests using the `--integration_tests` flag."
+            )
diff --git a/docs/autogen_config.py b/docs/autogen_config.py
index 37ee1875da..2892dfebe9 100644
--- a/docs/autogen_config.py
+++ b/docs/autogen_config.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
-
 import os
+
 from jupyter_server.serverapp import ServerApp

 header = """\
@@ -36,10 +36,10 @@
 """

 try:
-    destination = os.path.join(os.path.dirname(__file__), 'source/other/full-config.rst')
+    destination = os.path.join(os.path.dirname(__file__), "source/other/full-config.rst")
 except:
-    destination = os.path.join(os.getcwd(), 'full-config.rst')
+    destination = os.path.join(os.getcwd(), "full-config.rst")

-with open(destination, 'w') as f:
+with open(destination, "w") as f:
     f.write(header)
     f.write(ServerApp().document_config_options())
diff --git a/docs/doc-requirements.txt b/docs/doc-requirements.txt
index 189bf8f566..4bf44e12ee 100644
--- a/docs/doc-requirements.txt
+++ b/docs/doc-requirements.txt
@@ -1,12 +1,12 @@
+ipykernel
 jinja2
-tornado
 jupyter_client
-ipykernel
+myst-parser
 nbformat
-Send2Trash
 prometheus_client
-sphinxcontrib_github_alt
+pydata_sphinx_theme
+Send2Trash
 sphinxcontrib-openapi
+sphinxcontrib_github_alt
 sphinxemoji
-myst-parser
-pydata_sphinx_theme
+tornado
diff --git a/docs/environment.yml b/docs/environment.yml
index 0012f8f5ed..95122109fe 100644
--- a/docs/environment.yml
+++ b/docs/environment.yml
@@ -1,7 +1,7 @@
 name: jupyter_server_docs
 dependencies:
-- nodejs=14
-- python=3.8
-- pip
-- pip:
-  - -r doc-requirements.txt
+  - nodejs=14
+  - python=3.8
+  - pip
+  - pip:
+      - -r doc-requirements.txt
diff --git a/docs/source/conf.py b/docs/source/conf.py
index b9be17d3eb..99a0c98d3a 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -12,11 +12,10 @@
 #
 # All configuration values have a default; values that are commented out
 # serve to show the default.
-
-import sys
 import os
 import os.path as osp
 import shutil
+import sys

 HERE = osp.abspath(osp.dirname(__file__))
@@ -51,54 +50,54 @@
 # If so, generate a config.rst file and populate it with documentation about
 # configuration options
-if os.environ.get('READTHEDOCS', ''):
+if os.environ.get("READTHEDOCS", ""):
     # Readthedocs doesn't run our Makefile, so we do this to force it to generate
     # the config docs.
-    with open('../autogen_config.py') as f:
-        exec(compile(f.read(), '../autogen_config.py', 'exec'), {})
+    with open("../autogen_config.py") as f:
+        exec(compile(f.read(), "../autogen_config.py", "exec"), {})

 # -- General configuration ------------------------------------------------

 # If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'

 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-    'myst_parser',
-    'sphinx.ext.autodoc',
-    'sphinx.ext.doctest',
-    'sphinx.ext.intersphinx',
-    'sphinx.ext.autosummary',
-    'sphinx.ext.mathjax',
-    'IPython.sphinxext.ipython_console_highlighting',
-    'sphinxcontrib_github_alt',
-    'sphinxcontrib.openapi',
-    'sphinxemoji.sphinxemoji'
+    "myst_parser",
+    "sphinx.ext.autodoc",
+    "sphinx.ext.doctest",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.mathjax",
+    "IPython.sphinxext.ipython_console_highlighting",
+    "sphinxcontrib_github_alt",
+    "sphinxcontrib.openapi",
+    "sphinxemoji.sphinxemoji",
 ]

 myst_enable_extensions = ["html_image"]

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 # source_suffix = ['.rst', '.md']
-source_suffix = ['.rst', '.ipynb']
+source_suffix = [".rst", ".ipynb"]

 # The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'

 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"

 # General information about the project.
-project = 'Jupyter Server'
-copyright = '2020, Jupyter Team, https://jupyter.org'
-author = 'The Jupyter Team'
+project = "Jupyter Server"
+copyright = "2020, Jupyter Team, https://jupyter.org"
+author = "The Jupyter Team"

 # ghissue config
 github_project_url = "https://github.com/jupyter/jupyter_server"
@@ -107,13 +106,13 @@
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
-_version_py = '../../jupyter_server/_version.py'
+_version_py = "../../jupyter_server/_version.py"
 version_ns = {}
-exec(compile(open(_version_py).read(), _version_py, 'exec'), version_ns)
+exec(compile(open(_version_py).read(), _version_py, "exec"), version_ns)
 # The short X.Y version.
-version = '%i.%i' % version_ns['version_info'][:2]
+version = "%i.%i" % version_ns["version_info"][:2]
 # The full version, including alpha/beta/rc tags.
-release = version_ns['__version__']
+release = version_ns["__version__"]

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -124,9 +123,9 @@
 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-#today = ''
+# today = ''
 # Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'

 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
@@ -134,28 +133,28 @@
 # The reST default role (used for this markup: `text`) to use for all
 # documents.
-#default_role = None
+# default_role = None

 # If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True

 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True

 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-#show_authors = False
+# show_authors = False

 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'default'
-#highlight_language = 'python3'
+pygments_style = "default"
+# highlight_language = 'python3'

 # A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []

 # If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
+# keep_warnings = False

 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = False
@@ -179,33 +178,33 @@
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
-#html_theme = 'sphinx_rtd_theme'
+# html_theme = 'sphinx_rtd_theme'
 html_theme = "pydata_sphinx_theme"
 html_logo = "_static/jupyter_server_logo.svg"

 # Theme options are theme-specific and customize the look and feel of a theme
 # further.  For a list of options available for each theme, see the
 # documentation.
-#html_theme_options = {}
+# html_theme_options = {}

 # Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+# html_theme_path = []

 # The name for this set of Sphinx documents.  If None, it defaults to
 # " v documentation".
-#html_title = None
+# html_title = None

 # A shorter title for the navigation bar.  Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None

 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-#html_logo = None
+# html_logo = None

 # The name of an image file (within the static path) to use as favicon of the
 # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-#html_favicon = None
+# html_favicon = None

 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
@@ -213,134 +212,131 @@
 # NOTE: Sphinx's 'make html' builder will throw a warning about an unfound
 # _static directory. Do not remove or comment out html_static_path
 # since it is needed to properly generate _static in the build directory
-html_static_path = ['_static']
+html_static_path = ["_static"]

 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-#html_extra_path = []
+# html_extra_path = []

 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'

 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True

 # Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}

 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-#html_additional_pages = {}
+# html_additional_pages = {}

 # If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True

 # If false, no index is generated.
-#html_use_index = True
+# html_use_index = True

 # If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False

 # If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True

 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True

 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True

 # If true, an OpenSearch description file will be output, and all pages will
 # contain a tag referring to it.  The value of this option must be the
 # base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''

 # This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None

 # Language to be used for generating the HTML full-text search index.
 # Sphinx supports the following languages:
 #   'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
 #   'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
-#html_search_language = 'en'
+# html_search_language = 'en'

 # A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'JupyterServerdoc' +htmlhelp_basename = "JupyterServerdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'JupyterServer.tex', 'Jupyter Server Documentation', - 'https://jupyter.org', 'manual'), + ( + master_doc, + "JupyterServer.tex", + "Jupyter Server Documentation", + "https://jupyter.org", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'jupyterserver', 'Jupyter Server Documentation', - [author], 1) -] +man_pages = [(master_doc, "jupyterserver", "Jupyter Server Documentation", [author], 1)] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for link checks ---------------------------------------------- -linkcheck_ignore = [ - 'http://127\.0\.0\.1/*' -] +linkcheck_ignore = ["http://127\.0\.0\.1/*"] # -- Options for Texinfo output ------------------------------------------- @@ -349,37 +345,43 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'JupyterServer', 'Jupyter Server Documentation', - author, 'JupyterServer', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "JupyterServer", + "Jupyter Server Documentation", + author, + "JupyterServer", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. 
-#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False intersphinx_mapping = { - 'ipython': ('https://ipython.readthedocs.io/en/stable/', None), - 'nbconvert': ('https://nbconvert.readthedocs.io/en/latest/', None), - 'nbformat': ('https://nbformat.readthedocs.io/en/latest/', None), - 'jupyter': ('https://jupyter.readthedocs.io/en/latest/', None) + "ipython": ("https://ipython.readthedocs.io/en/stable/", None), + "nbconvert": ("https://nbconvert.readthedocs.io/en/latest/", None), + "nbformat": ("https://nbformat.readthedocs.io/en/latest/", None), + "jupyter": ("https://jupyter.readthedocs.io/en/latest/", None), } -spelling_lang='en_US' -spelling_word_list_filename='spelling_wordlist.txt' +spelling_lang = "en_US" +spelling_word_list_filename = "spelling_wordlist.txt" # import before any doc is built, so _ is guaranteed to be injected import jupyter_server.transutils # pylint: disable=unused-import def setup(app): - dest = osp.join(HERE, 'other', 'changelog.md') - shutil.copy(osp.join(HERE, '..', '..', 'CHANGELOG.md'), dest) + dest = osp.join(HERE, "other", "changelog.md") + shutil.copy(osp.join(HERE, "..", "..", "CHANGELOG.md"), dest) diff --git a/docs/source/contributors/team-meetings.rst b/docs/source/contributors/team-meetings.rst index 83fa326427..947142f2d7 100644 --- a/docs/source/contributors/team-meetings.rst +++ b/docs/source/contributors/team-meetings.rst @@ -18,4 +18,4 @@ Jupyter Calendar: .. raw:: html - \ No newline at end of file + diff --git a/docs/source/developers/contents.rst b/docs/source/developers/contents.rst index a0a0296fc4..28d2a33334 100644 --- a/docs/source/developers/contents.rst +++ b/docs/source/developers/contents.rst @@ -287,4 +287,3 @@ An asynchronous version of the Contents API is available to run slow IO processe However, if the Jupyter Notebook web application is interacting with a high-latent virtual filesystem, you may see performance gains by using the asynchronous version. For example, if you're experiencing terminal lag in the web application due to the slow and blocking file operations, the asynchronous version can reduce the lag. Before opting in, comparing both non-async and async options' performances is recommended. - diff --git a/docs/source/developers/rest-api.rst b/docs/source/developers/rest-api.rst index 44269b869e..ab5b627b15 100644 --- a/docs/source/developers/rest-api.rst +++ b/docs/source/developers/rest-api.rst @@ -4,4 +4,4 @@ The REST API An interactive version is available `here `_. -.. openapi:: ../../../jupyter_server/services/api/api.yaml \ No newline at end of file +.. 
openapi:: ../../../jupyter_server/services/api/api.yaml diff --git a/docs/source/index.rst b/docs/source/index.rst index 67172cf077..4594e86bfd 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -43,4 +43,4 @@ Table of Contents Operators Developers Contributors - Other \ No newline at end of file + Other diff --git a/docs/source/operators/configuring-extensions.rst b/docs/source/operators/configuring-extensions.rst index 0c8cba3d4a..839ae7214b 100644 --- a/docs/source/operators/configuring-extensions.rst +++ b/docs/source/operators/configuring-extensions.rst @@ -56,4 +56,4 @@ This will also work with any extension entrypoints that allow other extensions t .. code-block:: console - > jupyter myextension --ServerApp.port=9999 --MyExtension1.trait=False --MyExtension2.trait=True \ No newline at end of file + > jupyter myextension --ServerApp.port=9999 --MyExtension1.trait=False --MyExtension2.trait=True diff --git a/docs/source/operators/index.rst b/docs/source/operators/index.rst index a654be1a0c..41354bce73 100644 --- a/docs/source/operators/index.rst +++ b/docs/source/operators/index.rst @@ -12,4 +12,4 @@ These pages are targeted at people using, configuring, and/or deploying multiple configuring-extensions migrate-from-nbserver public-server - security \ No newline at end of file + security diff --git a/docs/source/operators/migrate-from-nbserver.rst b/docs/source/operators/migrate-from-nbserver.rst index 087ed90f63..d635d3b1ef 100644 --- a/docs/source/operators/migrate-from-nbserver.rst +++ b/docs/source/operators/migrate-from-nbserver.rst @@ -33,4 +33,4 @@ If you want to switch to Jupyter Server, but you still want to serve `Jupyter No NBClassic is a Jupyter Server extension that serves the Notebook frontend (i.e. all static assets) on top of Jupyter Server. It even loads Jupyter Notebook's config files. -.. warning:: NBClassic will only work for a limited time. Jupyter Server is likely to evolve beyond a point where Jupyter Notebook frontend will no longer work with the underlying server. Consider switching to `JupyterLab `_ or `nteract `_ where there is active development happening. \ No newline at end of file +.. warning:: NBClassic will only work for a limited time. Jupyter Server is likely to evolve beyond a point where Jupyter Notebook frontend will no longer work with the underlying server. Consider switching to `JupyterLab `_ or `nteract `_ where there is active development happening. diff --git a/docs/source/operators/security.rst b/docs/source/operators/security.rst index 6479fb3a9c..5f69c334b9 100644 --- a/docs/source/operators/security.rst +++ b/docs/source/operators/security.rst @@ -217,4 +217,4 @@ To share a signatures database among users, you can configure: c.NotebookNotary.data_dir = "/path/to/signature_dir" to specify a non-default path to the SQLite database (of notebook hashes, -essentially). \ No newline at end of file +essentially). diff --git a/docs/source/other/faq.rst b/docs/source/other/faq.rst index 507eea942b..db9e2634df 100644 --- a/docs/source/other/faq.rst +++ b/docs/source/other/faq.rst @@ -11,4 +11,3 @@ Can I configure multiple extensions at once? -------------------------------------------- Checkout our "Operator" docs on how to :ref:`configure extensions `. 
|:closed_book:| - diff --git a/docs/source/other/full-config.rst b/docs/source/other/full-config.rst index af92c3099f..55d708a5cb 100644 --- a/docs/source/other/full-config.rst +++ b/docs/source/other/full-config.rst @@ -1388,4 +1388,3 @@ GatewayClient.ws_url : Unicode The websocket url of the Kernel or Enterprise Gateway server. If not provided, this value will correspond to the value of the Gateway url with 'ws' in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var) - diff --git a/docs/source/users/configuration.rst b/docs/source/users/configuration.rst index 2e150c8cb4..b6ede5b28b 100644 --- a/docs/source/users/configuration.rst +++ b/docs/source/users/configuration.rst @@ -65,4 +65,4 @@ Alternatively, you can configure Jupyter Server when launching from the command Full configuration list ----------------------- -See the full list of configuration options for the server :ref:`here `. \ No newline at end of file +See the full list of configuration options for the server :ref:`here `. diff --git a/docs/source/users/help.rst b/docs/source/users/help.rst index a149d40e9a..b290e1bb07 100644 --- a/docs/source/users/help.rst +++ b/docs/source/users/help.rst @@ -5,4 +5,4 @@ Getting Help If you run into any issues or bugs, please open an `issue on Github `_. -We'd also love to have you come by our :ref:`Team Meetings `. \ No newline at end of file +We'd also love to have you come by our :ref:`Team Meetings `. diff --git a/docs/source/users/index.rst b/docs/source/users/index.rst index 9abfe2f194..3f0bab7753 100644 --- a/docs/source/users/index.rst +++ b/docs/source/users/index.rst @@ -12,4 +12,4 @@ The Jupyter Server is a highly technical piece of the Jupyter Stack, so users pr installation configuration launching - help \ No newline at end of file + help diff --git a/docs/source/users/launching.rst b/docs/source/users/launching.rst index b90fb3eea0..1338b74439 100644 --- a/docs/source/users/launching.rst +++ b/docs/source/users/launching.rst @@ -23,4 +23,4 @@ Sometimes, though, it can be useful to start Jupyter Server directly when you wa file:///Users/username/jpserver-###-open.html Or copy and paste one of these URLs: http://localhost:8888/?token=<...> - or http://127.0.0.1:8888/?token=<...> \ No newline at end of file + or http://127.0.0.1:8888/?token=<...> diff --git a/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext1.json b/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext1.json index 42e7565113..fd4b771c90 100644 --- a/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext1.json +++ b/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext1.json @@ -1,7 +1,7 @@ { - "ServerApp": { - "jpserver_extensions": { - "simple_ext1": true - } + "ServerApp": { + "jpserver_extensions": { + "simple_ext1": true } + } } diff --git a/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext11.json b/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext11.json index 4f9cbbbfac..6857ee2c63 100644 --- a/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext11.json +++ b/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext11.json @@ -1,7 +1,7 @@ { - "ServerApp": { - "jpserver_extensions": { - "simple_ext11": true - } + "ServerApp": { + "jpserver_extensions": { + "simple_ext11": true } + } } diff --git a/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext2.json b/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext2.json index 8104acace1..287a167bda 100644 --- 
a/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext2.json +++ b/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext2.json @@ -1,7 +1,7 @@ { - "ServerApp": { - "jpserver_extensions": { - "simple_ext2": true - } + "ServerApp": { + "jpserver_extensions": { + "simple_ext2": true } + } } diff --git a/examples/simple/jupyter_server_config.py b/examples/simple/jupyter_server_config.py index d8f9314192..b994a7140c 100644 --- a/examples/simple/jupyter_server_config.py +++ b/examples/simple/jupyter_server_config.py @@ -1,8 +1,6 @@ # Configuration file for jupyter-server extensions. - -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ # Application(SingletonConfigurable) configuration -#------------------------------------------------------------------------------ - +# ------------------------------------------------------------------------------ ## The date format used by logging formatters for %(asctime)s -c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S Simple_Extensions_Example' +c.Application.log_datefmt = "%Y-%m-%d %H:%M:%S Simple_Extensions_Example" diff --git a/examples/simple/jupyter_simple_ext1_config.py b/examples/simple/jupyter_simple_ext1_config.py index 7139e046a7..f40b66afaf 100644 --- a/examples/simple/jupyter_simple_ext1_config.py +++ b/examples/simple/jupyter_simple_ext1_config.py @@ -1,4 +1,4 @@ -c.SimpleApp1.configA = 'ConfigA from file' -c.SimpleApp1.configB = 'ConfigB from file' -c.SimpleApp1.configC = 'ConfigC from file' -c.SimpleApp1.configD = 'ConfigD from file' +c.SimpleApp1.configA = "ConfigA from file" +c.SimpleApp1.configB = "ConfigB from file" +c.SimpleApp1.configC = "ConfigC from file" +c.SimpleApp1.configD = "ConfigD from file" diff --git a/examples/simple/jupyter_simple_ext2_config.py b/examples/simple/jupyter_simple_ext2_config.py index 6bdbc06089..f145cbb87a 100644 --- a/examples/simple/jupyter_simple_ext2_config.py +++ b/examples/simple/jupyter_simple_ext2_config.py @@ -1 +1 @@ -c.SimpleApp2.configD = 'ConfigD from file' +c.SimpleApp2.configD = "ConfigD from file" diff --git a/examples/simple/pyproject.toml b/examples/simple/pyproject.toml index e2d7e08323..d4ff20627c 100644 --- a/examples/simple/pyproject.toml +++ b/examples/simple/pyproject.toml @@ -1,3 +1,3 @@ [build-system] requires = ["jupyter_packaging~=0.5.0", "setuptools>=40.8.0", "wheel"] -build-backend = "setuptools.build_meta" \ No newline at end of file +build-backend = "setuptools.build_meta" diff --git a/examples/simple/setup.py b/examples/simple/setup.py index c15b124566..ec77d1b24c 100644 --- a/examples/simple/setup.py +++ b/examples/simple/setup.py @@ -1,58 +1,58 @@ import os -from setuptools import setup + from jupyter_packaging import create_cmdclass +from setuptools import setup -VERSION = '0.0.1' +VERSION = "0.0.1" def get_data_files(): - """Get the data files for the package. - """ + """Get the data files for the package.""" data_files = [ - ('etc/jupyter/jupyter_server_config.d', 'etc/jupyter/jupyter_server_config.d/', '*.json'), + ("etc/jupyter/jupyter_server_config.d", "etc/jupyter/jupyter_server_config.d/", "*.json"), ] + def add_data_files(path): for (dirpath, dirnames, filenames) in os.walk(path): if filenames: paths = [(dirpath, dirpath, filename) for filename in filenames] data_files.extend(paths) + # Add all static and templates folders. 
- add_data_files('simple_ext1/static') - add_data_files('simple_ext1/templates') - add_data_files('simple_ext2/static') - add_data_files('simple_ext2/templates') + add_data_files("simple_ext1/static") + add_data_files("simple_ext1/templates") + add_data_files("simple_ext2/static") + add_data_files("simple_ext2/templates") return data_files -cmdclass = create_cmdclass( - data_files_spec=get_data_files() -) +cmdclass = create_cmdclass(data_files_spec=get_data_files()) setup_args = dict( - name = 'jupyter_server_example', - version = VERSION, - description = 'Jupyter Server Example', - long_description = open('README.md').read(), - python_requires = '>=3.6', - install_requires = [ - 'jupyter_server', - 'jinja2', + name="jupyter_server_example", + version=VERSION, + description="Jupyter Server Example", + long_description=open("README.md").read(), + python_requires=">=3.6", + install_requires=[ + "jupyter_server", + "jinja2", ], - extras_require = { - 'test': ['pytest'], + extras_require={ + "test": ["pytest"], }, include_package_data=True, - cmdclass = cmdclass, - entry_points = { - 'console_scripts': [ - 'jupyter-simple-ext1 = simple_ext1.application:main', - 'jupyter-simple-ext11 = simple_ext11.application:main', - 'jupyter-simple-ext2 = simple_ext2.application:main' + cmdclass=cmdclass, + entry_points={ + "console_scripts": [ + "jupyter-simple-ext1 = simple_ext1.application:main", + "jupyter-simple-ext11 = simple_ext11.application:main", + "jupyter-simple-ext2 = simple_ext2.application:main", ] }, ) -if __name__ == '__main__': +if __name__ == "__main__": setup(**setup_args) diff --git a/examples/simple/simple_ext1/__init__.py b/examples/simple/simple_ext1/__init__.py index 4416f53792..7b0c65c96f 100644 --- a/examples/simple/simple_ext1/__init__.py +++ b/examples/simple/simple_ext1/__init__.py @@ -2,7 +2,4 @@ def _jupyter_server_extension_paths(): - return [{ - 'module': 'simple_ext1.application', - 'app': SimpleApp1 - }] \ No newline at end of file + return [{"module": "simple_ext1.application", "app": SimpleApp1}] diff --git a/examples/simple/simple_ext1/__main__.py b/examples/simple/simple_ext1/__main__.py index 6ca6f5d746..317a0bd1f5 100644 --- a/examples/simple/simple_ext1/__main__.py +++ b/examples/simple/simple_ext1/__main__.py @@ -1,4 +1,4 @@ from .application import main if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/simple/simple_ext1/application.py b/examples/simple/simple_ext1/application.py index 3cf9211ad7..91e734f871 100644 --- a/examples/simple/simple_ext1/application.py +++ b/examples/simple/simple_ext1/application.py @@ -1,63 +1,61 @@ import os + from traitlets import Unicode -from jupyter_server.extension.application import ExtensionApp, ExtensionAppJinjaMixin -from .handlers import (DefaultHandler, RedirectHandler, - ParameterHandler, TemplateHandler, TypescriptHandler, ErrorHandler) + +from .handlers import DefaultHandler +from .handlers import ErrorHandler +from .handlers import ParameterHandler +from .handlers import RedirectHandler +from .handlers import TemplateHandler +from .handlers import TypescriptHandler +from jupyter_server.extension.application import ExtensionApp +from jupyter_server.extension.application import ExtensionAppJinjaMixin DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static") DEFAULT_TEMPLATE_FILES_PATH = os.path.join(os.path.dirname(__file__), "templates") + class SimpleApp1(ExtensionAppJinjaMixin, ExtensionApp): # The name of the extension. 
name = "simple_ext1" # The url that your extension will serve its homepage. - extension_url = '/simple_ext1/default' + extension_url = "/simple_ext1/default" # Should your extension expose other server extensions when launched directly? load_other_extensions = True # Local path to static files directory. - static_paths = [ - DEFAULT_STATIC_FILES_PATH - ] + static_paths = [DEFAULT_STATIC_FILES_PATH] # Local path to templates directory. - template_paths = [ - DEFAULT_TEMPLATE_FILES_PATH - ] + template_paths = [DEFAULT_TEMPLATE_FILES_PATH] - configA = Unicode('', - config=True, - help='Config A example.' - ) + configA = Unicode("", config=True, help="Config A example.") - configB = Unicode('', - config=True, - help='Config B example.' - ) + configB = Unicode("", config=True, help="Config B example.") - configC = Unicode('', - config=True, - help='Config C example.' - ) + configC = Unicode("", config=True, help="Config C example.") def initialize_handlers(self): - self.handlers.extend([ - (r'/{}/default'.format(self.name), DefaultHandler), - (r'/{}/params/(.+)$'.format(self.name), ParameterHandler), - (r'/{}/template1/(.*)$'.format(self.name), TemplateHandler), - (r'/{}/redirect'.format(self.name), RedirectHandler), - (r'/{}/typescript/?'.format(self.name), TypescriptHandler), - (r'/{}/(.*)', ErrorHandler) - ]) + self.handlers.extend( + [ + (r"/{}/default".format(self.name), DefaultHandler), + (r"/{}/params/(.+)$".format(self.name), ParameterHandler), + (r"/{}/template1/(.*)$".format(self.name), TemplateHandler), + (r"/{}/redirect".format(self.name), RedirectHandler), + (r"/{}/typescript/?".format(self.name), TypescriptHandler), + (r"/{}/(.*)", ErrorHandler), + ] + ) def initialize_settings(self): - self.log.info('Config {}'.format(self.config)) + self.log.info("Config {}".format(self.config)) + -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Main entry point -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- main = launch_new_instance = SimpleApp1.launch_instance diff --git a/examples/simple/simple_ext1/handlers.py b/examples/simple/simple_ext1/handlers.py index c3765edeca..b63d84811a 100644 --- a/examples/simple/simple_ext1/handlers.py +++ b/examples/simple/simple_ext1/handlers.py @@ -1,40 +1,51 @@ from jupyter_server.base.handlers import JupyterHandler -from jupyter_server.extension.handler import ExtensionHandlerMixin, ExtensionHandlerJinjaMixin +from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin +from jupyter_server.extension.handler import ExtensionHandlerMixin from jupyter_server.utils import url_escape + class DefaultHandler(ExtensionHandlerMixin, JupyterHandler): def get(self): # The name of the extension to which this handler is linked. self.log.info("Extension Name in {} Default Handler: {}".format(self.name, self.name)) # A method for getting the url to static files (prefixed with /static/). - self.log.info("Static URL for / in simple_ext1 Default Handler: {}".format(self.static_url(path='/'))) - self.write('
<h1>Hello Simple 1 - I am the default...</h1>') - self.write('Config in {} Default Handler: {}'.format(self.name, self.config)) + self.log.info( + "Static URL for / in simple_ext1 Default Handler: {}".format(self.static_url(path="/")) + ) + self.write("<h1>Hello Simple 1 - I am the default...</h1>") + self.write("Config in {} Default Handler: {}".format(self.name, self.config)) + class RedirectHandler(ExtensionHandlerMixin, JupyterHandler): def get(self): self.redirect("/static/{}/favicon.ico".format(self.name)) + class ParameterHandler(ExtensionHandlerMixin, JupyterHandler): def get(self, matched_part=None, *args, **kwargs): - var1 = self.get_argument('var1', default=None) + var1 = self.get_argument("var1", default=None) components = [x for x in self.request.path.split("/") if x] - self.write('<h1>Hello Simple App 1 from Handler.</h1>') - self.write('<p>matched_part: {}</p>'.format(url_escape(matched_part))) - self.write('<p>var1: {}</p>'.format(url_escape(var1))) - self.write('<p>components: {}</p>'.format(components)) + self.write("<h1>Hello Simple App 1 from Handler.</h1>") + self.write("<p>matched_part: {}</p>".format(url_escape(matched_part))) + self.write("<p>var1: {}</p>".format(url_escape(var1))) + self.write("<p>components: {}</p>
".format(components)) + + +class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): + pass -class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): pass class TypescriptHandler(BaseTemplateHandler): def get(self): self.write(self.render_template("typescript.html")) + class TemplateHandler(BaseTemplateHandler): def get(self, path): """ Optionaly, you can print(self.get_template('simple1.html'))""" - self.write(self.render_template('simple1.html', path=path)) + self.write(self.render_template("simple1.html", path=path)) + class ErrorHandler(BaseTemplateHandler): def get(self, path): - self.write(self.render_template('error.html', path=path)) + self.write(self.render_template("error.html", path=path)) diff --git a/examples/simple/simple_ext1/static/bundle.js b/examples/simple/simple_ext1/static/bundle.js index f6b8f608a0..9590862ded 100644 --- a/examples/simple/simple_ext1/static/bundle.js +++ b/examples/simple/simple_ext1/static/bundle.js @@ -1,111 +1,146 @@ -/******/ (function(modules) { // webpackBootstrap -/******/ // The module cache -/******/ var installedModules = {}; -/******/ -/******/ // The require function -/******/ function __webpack_require__(moduleId) { -/******/ -/******/ // Check if module is in cache -/******/ if(installedModules[moduleId]) { -/******/ return installedModules[moduleId].exports; -/******/ } -/******/ // Create a new module (and put it into the cache) -/******/ var module = installedModules[moduleId] = { -/******/ i: moduleId, -/******/ l: false, -/******/ exports: {} -/******/ }; -/******/ -/******/ // Execute the module function -/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); -/******/ -/******/ // Flag the module as loaded -/******/ module.l = true; -/******/ -/******/ // Return the exports of the module -/******/ return module.exports; -/******/ } -/******/ -/******/ -/******/ // expose the modules object (__webpack_modules__) -/******/ __webpack_require__.m = modules; -/******/ -/******/ // expose the module cache -/******/ __webpack_require__.c = installedModules; -/******/ -/******/ // define getter function for harmony exports -/******/ __webpack_require__.d = function(exports, name, getter) { -/******/ if(!__webpack_require__.o(exports, name)) { -/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter }); -/******/ } -/******/ }; -/******/ -/******/ // define __esModule on exports -/******/ __webpack_require__.r = function(exports) { -/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) { -/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' }); -/******/ } -/******/ Object.defineProperty(exports, '__esModule', { value: true }); -/******/ }; -/******/ -/******/ // create a fake namespace object -/******/ // mode & 1: value is a module id, require it -/******/ // mode & 2: merge all properties of value into the ns -/******/ // mode & 4: return value when already ns object -/******/ // mode & 8|1: behave like require -/******/ __webpack_require__.t = function(value, mode) { -/******/ if(mode & 1) value = __webpack_require__(value); -/******/ if(mode & 8) return value; -/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value; -/******/ var ns = Object.create(null); -/******/ __webpack_require__.r(ns); -/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value }); -/******/ if(mode & 2 && typeof value != 'string') 
for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key)); -/******/ return ns; -/******/ }; -/******/ -/******/ // getDefaultExport function for compatibility with non-harmony modules -/******/ __webpack_require__.n = function(module) { -/******/ var getter = module && module.__esModule ? -/******/ function getDefault() { return module['default']; } : -/******/ function getModuleExports() { return module; }; -/******/ __webpack_require__.d(getter, 'a', getter); -/******/ return getter; -/******/ }; -/******/ -/******/ // Object.prototype.hasOwnProperty.call -/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; -/******/ -/******/ // __webpack_public_path__ -/******/ __webpack_require__.p = ""; -/******/ -/******/ -/******/ // Load entry module and return exports -/******/ return __webpack_require__(__webpack_require__.s = 0); -/******/ }) -/************************************************************************/ -/******/ ({ - -/***/ "./simple_ext1/static/index.js": -/*!*************************************!*\ +/******/ (function (modules) { + // webpackBootstrap + /******/ // The module cache + /******/ var installedModules = {}; // The require function + /******/ + /******/ /******/ function __webpack_require__(moduleId) { + /******/ + /******/ // Check if module is in cache + /******/ if (installedModules[moduleId]) { + /******/ return installedModules[moduleId].exports; + /******/ + } // Create a new module (and put it into the cache) + /******/ /******/ var module = (installedModules[moduleId] = { + /******/ i: moduleId, + /******/ l: false, + /******/ exports: {}, + /******/ + }); // Execute the module function + /******/ + /******/ /******/ modules[moduleId].call( + module.exports, + module, + module.exports, + __webpack_require__ + ); // Flag the module as loaded + /******/ + /******/ /******/ module.l = true; // Return the exports of the module + /******/ + /******/ /******/ return module.exports; + /******/ + } // expose the modules object (__webpack_modules__) + /******/ + /******/ + /******/ /******/ __webpack_require__.m = modules; // expose the module cache + /******/ + /******/ /******/ __webpack_require__.c = installedModules; // define getter function for harmony exports + /******/ + /******/ /******/ __webpack_require__.d = function (exports, name, getter) { + /******/ if (!__webpack_require__.o(exports, name)) { + /******/ Object.defineProperty(exports, name, { + enumerable: true, + get: getter, + }); + /******/ + } + /******/ + }; // define __esModule on exports + /******/ + /******/ /******/ __webpack_require__.r = function (exports) { + /******/ if (typeof Symbol !== "undefined" && Symbol.toStringTag) { + /******/ Object.defineProperty(exports, Symbol.toStringTag, { + value: "Module", + }); + /******/ + } + /******/ Object.defineProperty(exports, "__esModule", { value: true }); + /******/ + }; // create a fake namespace object // mode & 1: value is a module id, require it // mode & 2: merge all properties of value into the ns // mode & 4: return value when already ns object // mode & 8|1: behave like require + /******/ + /******/ /******/ /******/ /******/ /******/ /******/ __webpack_require__.t = function ( + value, + mode + ) { + /******/ if (mode & 1) value = __webpack_require__(value); + /******/ if (mode & 8) return value; + /******/ if ( + mode & 4 && + typeof value === "object" && + value && + value.__esModule + ) + return value; + 
/******/ var ns = Object.create(null); + /******/ __webpack_require__.r(ns); + /******/ Object.defineProperty(ns, "default", { + enumerable: true, + value: value, + }); + /******/ if (mode & 2 && typeof value != "string") + for (var key in value) + __webpack_require__.d( + ns, + key, + function (key) { + return value[key]; + }.bind(null, key) + ); + /******/ return ns; + /******/ + }; // getDefaultExport function for compatibility with non-harmony modules + /******/ + /******/ /******/ __webpack_require__.n = function (module) { + /******/ var getter = + module && module.__esModule + ? /******/ function getDefault() { + return module["default"]; + } + : /******/ function getModuleExports() { + return module; + }; + /******/ __webpack_require__.d(getter, "a", getter); + /******/ return getter; + /******/ + }; // Object.prototype.hasOwnProperty.call + /******/ + /******/ /******/ __webpack_require__.o = function (object, property) { + return Object.prototype.hasOwnProperty.call(object, property); + }; // __webpack_public_path__ + /******/ + /******/ /******/ __webpack_require__.p = ""; // Load entry module and return exports + /******/ + /******/ + /******/ /******/ return __webpack_require__((__webpack_require__.s = 0)); + /******/ +})( + /************************************************************************/ + /******/ { + /***/ "./simple_ext1/static/index.js": + /*!*************************************!*\ !*** ./simple_ext1/static/index.js ***! \*************************************/ -/*! no static exports found */ -/***/ (function(module, exports) { - -eval("function main() {\n let div = document.getElementById(\"mydiv\");\n div.innerText = \"Hello from Typescript\";\n}\nwindow.addEventListener('load', main);\n\n\n//# sourceURL=webpack:///./simple_ext1/static/index.js?"); + /*! no static exports found */ + /***/ function (module, exports) { + eval( + 'function main() {\n let div = document.getElementById("mydiv");\n div.innerText = "Hello from Typescript";\n}\nwindow.addEventListener(\'load\', main);\n\n\n//# sourceURL=webpack:///./simple_ext1/static/index.js?' + ); -/***/ }), + /***/ + }, -/***/ 0: -/*!*******************************************!*\ + /***/ 0: + /*!*******************************************!*\ !*** multi ./simple_ext1/static/index.js ***! \*******************************************/ -/*! no static exports found */ -/***/ (function(module, exports, __webpack_require__) { - -eval("module.exports = __webpack_require__(/*! ./simple_ext1/static/index.js */\"./simple_ext1/static/index.js\");\n\n\n//# sourceURL=webpack:///multi_./simple_ext1/static/index.js?"); + /*! no static exports found */ + /***/ function (module, exports, __webpack_require__) { + eval( + 'module.exports = __webpack_require__(/*! ./simple_ext1/static/index.js */"./simple_ext1/static/index.js");\n\n\n//# sourceURL=webpack:///multi_./simple_ext1/static/index.js?' 
+ ); -/***/ }) + /***/ + }, -/******/ }); \ No newline at end of file + /******/ + } +); diff --git a/examples/simple/simple_ext1/static/index.js b/examples/simple/simple_ext1/static/index.js index a7a67eed18..4cc84b9bc3 100644 --- a/examples/simple/simple_ext1/static/index.js +++ b/examples/simple/simple_ext1/static/index.js @@ -1,5 +1,5 @@ function main() { - let div = document.getElementById("mydiv"); - div.innerText = "Hello from Typescript"; + let div = document.getElementById("mydiv"); + div.innerText = "Hello from Typescript"; } -window.addEventListener('load', main); +window.addEventListener("load", main); diff --git a/examples/simple/simple_ext1/static/tsconfig.tsbuildinfo b/examples/simple/simple_ext1/static/tsconfig.tsbuildinfo index 62d6680e8a..8167ef00a2 100644 --- a/examples/simple/simple_ext1/static/tsconfig.tsbuildinfo +++ b/examples/simple/simple_ext1/static/tsconfig.tsbuildinfo @@ -95,4 +95,4 @@ ] }, "version": "3.6.4" -} \ No newline at end of file +} diff --git a/examples/simple/simple_ext11/__init__.py b/examples/simple/simple_ext11/__init__.py index c9f9bbe1eb..abe0f73a2a 100644 --- a/examples/simple/simple_ext11/__init__.py +++ b/examples/simple/simple_ext11/__init__.py @@ -2,9 +2,4 @@ def _jupyter_server_extension_paths(): - return [ - { - 'module': 'simple_ext11.application', - 'app': SimpleApp11 - } - ] \ No newline at end of file + return [{"module": "simple_ext11.application", "app": SimpleApp11}] diff --git a/examples/simple/simple_ext11/__main__.py b/examples/simple/simple_ext11/__main__.py index 6ca6f5d746..317a0bd1f5 100644 --- a/examples/simple/simple_ext11/__main__.py +++ b/examples/simple/simple_ext11/__main__.py @@ -1,4 +1,4 @@ from .application import main if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/simple/simple_ext11/application.py b/examples/simple/simple_ext11/application.py index b772bb34ff..82411dd0a6 100644 --- a/examples/simple/simple_ext11/application.py +++ b/examples/simple/simple_ext11/application.py @@ -1,71 +1,75 @@ import os + from simple_ext1.application import SimpleApp1 -from jupyter_server.serverapp import aliases, flags -from traitlets import Bool, Unicode, observe +from traitlets import Bool +from traitlets import observe +from traitlets import Unicode + +from jupyter_server.serverapp import aliases +from jupyter_server.serverapp import flags DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "./../simple_ext1/static") DEFAULT_TEMPLATE_FILES_PATH = os.path.join(os.path.dirname(__file__), "./../simple_ext1/templates") + class SimpleApp11(SimpleApp1): - flags['hello']=( - { 'SimpleApp11' : {'hello' : True} }, "Say hello on startup." + flags["hello"] = ({"SimpleApp11": {"hello": True}}, "Say hello on startup.") + aliases.update( + { + "simple11-dir": "SimpleApp11.simple11_dir", + } ) - aliases.update({ - 'simple11-dir': 'SimpleApp11.simple11_dir', - }) # The name of the extension. name = "simple_ext11" # Te url that your extension will serve its homepage. - extension_url = '/simple_ext11/default' + extension_url = "/simple_ext11/default" # Local path to static files directory. - static_paths = [ - DEFAULT_STATIC_FILES_PATH - ] + static_paths = [DEFAULT_STATIC_FILES_PATH] # Local path to templates directory. 
- template_paths = [ - DEFAULT_TEMPLATE_FILES_PATH - ] + template_paths = [DEFAULT_TEMPLATE_FILES_PATH] - simple11_dir = Unicode('', - config=True, - help='Simple directory' - ) + simple11_dir = Unicode("", config=True, help="Simple directory") - hello = Bool(False, + hello = Bool( + False, config=True, - help='Say hello', + help="Say hello", ) - ignore_js = Bool(False, + ignore_js = Bool( + False, config=True, - help='Ignore Javascript', + help="Ignore Javascript", ) - @observe('ignore_js') + @observe("ignore_js") def _update_ignore_js(self, change): """TODO Does the observe work?""" - self.log.info('ignore_js has just changed: {}'.format(change)) + self.log.info("ignore_js has just changed: {}".format(change)) @property def simple11_dir_formatted(self): return "/" + self.simple11_dir def initialize_settings(self): - self.log.info('hello: {}'.format(self.hello)) + self.log.info("hello: {}".format(self.hello)) if self.hello == True: - self.log.info("Hello Simple11: You have launched with --hello flag or defined 'c.SimpleApp1.hello == True' in your config file") - self.log.info('ignore_js: {}'.format(self.ignore_js)) + self.log.info( + "Hello Simple11: You have launched with --hello flag or defined 'c.SimpleApp1.hello == True' in your config file" + ) + self.log.info("ignore_js: {}".format(self.ignore_js)) super().initialize_settings() def initialize_handlers(self): super().initialize_handlers() -#----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- # Main entry point -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- main = launch_new_instance = SimpleApp11.launch_instance diff --git a/examples/simple/simple_ext2/__init__.py b/examples/simple/simple_ext2/__init__.py index 8c47ed420f..ffe7bc43c3 100644 --- a/examples/simple/simple_ext2/__init__.py +++ b/examples/simple/simple_ext2/__init__.py @@ -3,8 +3,5 @@ def _jupyter_server_extension_paths(): return [ - { - 'module': 'simple_ext2.application', - 'app': SimpleApp2 - }, - ] \ No newline at end of file + {"module": "simple_ext2.application", "app": SimpleApp2}, + ] diff --git a/examples/simple/simple_ext2/__main__.py b/examples/simple/simple_ext2/__main__.py index 6ca6f5d746..317a0bd1f5 100644 --- a/examples/simple/simple_ext2/__main__.py +++ b/examples/simple/simple_ext2/__main__.py @@ -1,4 +1,4 @@ from .application import main if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/simple/simple_ext2/application.py b/examples/simple/simple_ext2/application.py index d485265691..fcda51dc9d 100644 --- a/examples/simple/simple_ext2/application.py +++ b/examples/simple/simple_ext2/application.py @@ -1,50 +1,53 @@ import os + from traitlets import Unicode -from jupyter_server.extension.application import ExtensionApp, ExtensionAppJinjaMixin -from .handlers import ParameterHandler, TemplateHandler, IndexHandler, ErrorHandler + +from .handlers import ErrorHandler +from .handlers import IndexHandler +from .handlers import ParameterHandler +from .handlers import TemplateHandler +from jupyter_server.extension.application import ExtensionApp +from jupyter_server.extension.application import ExtensionAppJinjaMixin DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static") DEFAULT_TEMPLATE_FILES_PATH = os.path.join(os.path.dirname(__file__), "templates") + class 
SimpleApp2(ExtensionAppJinjaMixin, ExtensionApp): # The name of the extension. name = "simple_ext2" # Te url that your extension will serve its homepage. - extension_url = '/simple_ext2' + extension_url = "/simple_ext2" # Should your extension expose other server extensions when launched directly? load_other_extensions = True # Local path to static files directory. - static_paths = [ - DEFAULT_STATIC_FILES_PATH - ] + static_paths = [DEFAULT_STATIC_FILES_PATH] # Local path to templates directory. - template_paths = [ - DEFAULT_TEMPLATE_FILES_PATH - ] + template_paths = [DEFAULT_TEMPLATE_FILES_PATH] - configD = Unicode('', - config=True, - help='Config D example.' - ) + configD = Unicode("", config=True, help="Config D example.") def initialize_handlers(self): - self.handlers.extend([ - (r'/simple_ext2/params/(.+)$', ParameterHandler), - (r'/simple_ext2/template', TemplateHandler), - (r'/simple_ext2/?', IndexHandler), - (r'/simple_ext2/(.*)', ErrorHandler) - ]) + self.handlers.extend( + [ + (r"/simple_ext2/params/(.+)$", ParameterHandler), + (r"/simple_ext2/template", TemplateHandler), + (r"/simple_ext2/?", IndexHandler), + (r"/simple_ext2/(.*)", ErrorHandler), + ] + ) def initialize_settings(self): - self.log.info('Config {}'.format(self.config)) + self.log.info("Config {}".format(self.config)) + -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Main entry point -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- main = launch_new_instance = SimpleApp2.launch_instance diff --git a/examples/simple/simple_ext2/handlers.py b/examples/simple/simple_ext2/handlers.py index febfeae168..2e37fe87c6 100644 --- a/examples/simple/simple_ext2/handlers.py +++ b/examples/simple/simple_ext2/handlers.py @@ -1,27 +1,34 @@ from jupyter_server.base.handlers import JupyterHandler -from jupyter_server.extension.handler import ExtensionHandlerMixin, ExtensionHandlerJinjaMixin +from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin +from jupyter_server.extension.handler import ExtensionHandlerMixin from jupyter_server.utils import url_escape + class ParameterHandler(ExtensionHandlerMixin, JupyterHandler): def get(self, matched_part=None, *args, **kwargs): - var1 = self.get_argument('var1', default=None) + var1 = self.get_argument("var1", default=None) components = [x for x in self.request.path.split("/") if x] - self.write('
<h1>Hello Simple App 2 from Handler.</h1>') - self.write('<p>matched_part: {}</p>'.format(url_escape(matched_part))) - self.write('<p>var1: {}</p>'.format(url_escape(var1))) - self.write('<p>components: {}</p>'.format(components)) + self.write("<h1>Hello Simple App 2 from Handler.</h1>") + self.write("<p>matched_part: {}</p>".format(url_escape(matched_part))) + self.write("<p>var1: {}</p>".format(url_escape(var1))) + self.write("<p>components: {}</p>
".format(components)) + + +class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): + pass -class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): pass class IndexHandler(BaseTemplateHandler): def get(self): self.write(self.render_template("index.html")) + class TemplateHandler(BaseTemplateHandler): def get(self, path): - print(self.get_template('simple_ext2.html')) - self.write(self.render_template('simple_ext2.html', path=path)) + print(self.get_template("simple_ext2.html")) + self.write(self.render_template("simple_ext2.html", path=path)) + class ErrorHandler(BaseTemplateHandler): def get(self, path): - self.write(self.render_template('error.html')) + self.write(self.render_template("error.html")) diff --git a/examples/simple/src/index.ts b/examples/simple/src/index.ts index 3137216e29..c950e96223 100644 --- a/examples/simple/src/index.ts +++ b/examples/simple/src/index.ts @@ -3,4 +3,4 @@ function main() { div.innerText = "Hello from Typescript"; } -window.addEventListener('load', main); +window.addEventListener("load", main); diff --git a/examples/simple/src/tsconfig.json b/examples/simple/src/tsconfig.json index 711f9c1afc..e987b05d93 100644 --- a/examples/simple/src/tsconfig.json +++ b/examples/simple/src/tsconfig.json @@ -1,4 +1,3 @@ - { "compilerOptions": { "outDir": "../simple_ext1/static", diff --git a/examples/simple/tests/test_handlers.py b/examples/simple/tests/test_handlers.py index 7d231666bd..7c4cb69b29 100644 --- a/examples/simple/tests/test_handlers.py +++ b/examples/simple/tests/test_handlers.py @@ -4,27 +4,17 @@ @pytest.fixture def jp_server_config(jp_template_dir): return { - "ServerApp": { - "jpserver_extensions": { - "simple_ext1": True - } - }, - } + "ServerApp": {"jpserver_extensions": {"simple_ext1": True}}, + } async def test_handler_default(jp_fetch): - r = await jp_fetch( - 'simple_ext1/default', - method='GET' - ) + r = await jp_fetch("simple_ext1/default", method="GET") assert r.code == 200 print(r.body.decode()) - assert r.body.decode().index('Hello Simple 1 - I am the default...') > -1 + assert r.body.decode().index("Hello Simple 1 - I am the default...") > -1 async def test_handler_template(jp_fetch): - r = await jp_fetch( - 'simple_ext1/template1/test', - method='GET' - ) + r = await jp_fetch("simple_ext1/template1/test", method="GET") assert r.code == 200 diff --git a/examples/simple/webpack.config.js b/examples/simple/webpack.config.js index 6016ce5260..c0f4735649 100644 --- a/examples/simple/webpack.config.js +++ b/examples/simple/webpack.config.js @@ -1,8 +1,8 @@ module.exports = { - entry: ['./simple_ext1/static/index.js'], + entry: ["./simple_ext1/static/index.js"], output: { - path: require('path').join(__dirname, 'simple_ext1', 'static'), - filename: 'bundle.js' + path: require("path").join(__dirname, "simple_ext1", "static"), + filename: "bundle.js", }, - mode: 'development' -} + mode: "development", +}; diff --git a/jupyter_server/__init__.py b/jupyter_server/__init__.py index bb128fd043..e15e4e4671 100644 --- a/jupyter_server/__init__.py +++ b/jupyter_server/__init__.py @@ -1,13 +1,12 @@ """The Jupyter Server""" - import os -import sys import subprocess +import sys DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static") DEFAULT_TEMPLATE_PATH_LIST = [ os.path.dirname(__file__), - os.path.join(os.path.dirname(__file__), 'templates'), + os.path.join(os.path.dirname(__file__), "templates"), ] DEFAULT_JUPYTER_SERVER_PORT = 8888 @@ -24,7 +23,7 @@ def 
_cleanup(): # patch subprocess on Windows for python<3.7 # see https://bugs.python.org/issue37380 # the fix for python3.7: https://github.com/python/cpython/pull/15706/files -if sys.platform == 'win32': +if sys.platform == "win32": if sys.version_info < (3, 7): subprocess._cleanup = _cleanup subprocess._active = None diff --git a/jupyter_server/__main__.py b/jupyter_server/__main__.py index 12eaa92455..6ada4be7ea 100644 --- a/jupyter_server/__main__.py +++ b/jupyter_server/__main__.py @@ -1,4 +1,4 @@ - -if __name__ == '__main__': +if __name__ == "__main__": from jupyter_server import serverapp as app + app.launch_new_instance() diff --git a/jupyter_server/_sysinfo.py b/jupyter_server/_sysinfo.py index 6e57102567..1f1cbf2251 100644 --- a/jupyter_server/_sysinfo.py +++ b/jupyter_server/_sysinfo.py @@ -2,19 +2,18 @@ """ Utilities for getting information about Jupyter and the system it's running in. """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - import os import platform -import sys import subprocess +import sys from ipython_genutils import encoding import jupyter_server + def pkg_commit_hash(pkg_path): """Get short form of commit hash given directory `pkg_path` @@ -42,23 +41,25 @@ def pkg_commit_hash(pkg_path): par_path = pkg_path while cur_path != par_path: cur_path = par_path - if p.exists(p.join(cur_path, '.git')): + if p.exists(p.join(cur_path, ".git")): try: - proc = subprocess.Popen(['git', 'rev-parse', '--short', 'HEAD'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=pkg_path) + proc = subprocess.Popen( + ["git", "rev-parse", "--short", "HEAD"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=pkg_path, + ) repo_commit, _ = proc.communicate() except OSError: repo_commit = None if repo_commit: - return 'repository', repo_commit.strip().decode('ascii') + return "repository", repo_commit.strip().decode("ascii") else: - return u'', u'' + return u"", u"" par_path = p.dirname(par_path) - return u'', u'' + return u"", u"" def pkg_info(pkg_path): @@ -86,11 +87,11 @@ def pkg_info(pkg_path): platform=platform.platform(), os_name=os.name, default_encoding=encoding.DEFAULT_ENCODING, - ) + ) + def get_sys_info(): """Return useful information about the system as a dict.""" p = os.path path = p.realpath(p.dirname(p.abspath(p.join(jupyter_server.__file__)))) return pkg_info(path) - diff --git a/jupyter_server/_tz.py b/jupyter_server/_tz.py index 84ba012efa..4ea8cfcbc2 100644 --- a/jupyter_server/_tz.py +++ b/jupyter_server/_tz.py @@ -4,15 +4,16 @@ Just UTC-awareness right now """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- -from datetime import tzinfo, timedelta, datetime +from datetime import datetime +from datetime import timedelta +from datetime import tzinfo # constant for zero offset ZERO = timedelta(0) + class tzUTC(tzinfo): """tzinfo object for UTC (zero offset)""" @@ -22,21 +23,27 @@ def utcoffset(self, d): return ZERO def dst(self, d): return ZERO + UTC = tzUTC() + def utc_aware(unaware): """decorator for adding UTC tzinfo to datetime's utcfoo methods""" + def utc_method(*args, **kwargs): dt = unaware(*args, **kwargs) return dt.replace(tzinfo=UTC) + return utc_method + utcfromtimestamp = utc_aware(datetime.utcfromtimestamp) utcnow = utc_aware(datetime.utcnow) + def isoformat(dt): """Return iso-formatted timestamp Like .isoformat(), but uses Z for UTC instead of +00:00 """ - return dt.isoformat().replace('+00:00', 'Z') + return dt.isoformat().replace("+00:00", "Z") diff --git a/jupyter_server/_version.py b/jupyter_server/_version.py index 428a3db1ee..224b02ecf5 100644 --- a/jupyter_server/_version.py +++ b/jupyter_server/_version.py @@ -5,12 +5,12 @@ import re # Version string must appear intact for tbump versioning -__version__ = '1.11.0.dev0' +__version__ = "1.11.0.dev0" # Build up version_info tuple for backwards compatibility -pattern = r'(?P<major>\d+).(?P<minor>\d+).(?P<patch>\d+)(?P<rest>.*)' +pattern = r"(?P<major>\d+).(?P<minor>\d+).(?P<patch>\d+)(?P<rest>.*)" match = re.match(pattern, __version__) -parts = [int(match[part]) for part in ['major', 'minor', 'patch']] -if match['rest']: - parts.append(match['rest']) +parts = [int(match[part]) for part in ["major", "minor", "patch"]] +if match["rest"]: + parts.append(match["rest"]) version_info = tuple(parts) diff --git a/jupyter_server/auth/__main__.py b/jupyter_server/auth/__main__.py index 322891df1c..b34a3189c1 100644 --- a/jupyter_server/auth/__main__.py +++ b/jupyter_server/auth/__main__.py @@ -1,42 +1,55 @@ -from jupyter_server.auth import passwd -from getpass import getpass -from jupyter_server.config_manager import BaseJSONConfigManager -from jupyter_core.paths import jupyter_config_dir import argparse import sys +from getpass import getpass + +from jupyter_core.paths import jupyter_config_dir + +from jupyter_server.auth import passwd +from jupyter_server.config_manager import BaseJSONConfigManager + def set_password(args): - password = args.password - while not password : - password1 = getpass("" if args.quiet else "Provide password: ") - password_repeat = getpass("" if args.quiet else "Repeat password: ") - if password1 != password_repeat: - print("Passwords do not match, try again") - elif len(password1) < 4: - print("Please provide at least 4 characters") - else: - password = password1 - - password_hash = passwd(password) - cfg = BaseJSONConfigManager(config_dir=jupyter_config_dir()) - cfg.update('jupyter_server_config', { - 'ServerApp': { - 'password': password_hash, - } - }) - if not args.quiet: - print("password stored in config dir: %s" % jupyter_config_dir()) + password = args.password + while not password: + password1 = getpass("" if args.quiet else "Provide password: ") + password_repeat = getpass("" if args.quiet else "Repeat password: ") + if password1 != password_repeat: + print("Passwords do not match, try again") + elif len(password1) < 4: + print("Please provide at least 4 characters") + else: + password = password1 + + password_hash = passwd(password) + cfg = BaseJSONConfigManager(config_dir=jupyter_config_dir()) + cfg.update( + "jupyter_server_config", + { + "ServerApp": { + "password": password_hash, + } + }, + ) + if not args.quiet: + print("password stored in config dir: %s" %
jupyter_config_dir()) + def main(argv): - parser = argparse.ArgumentParser(argv[0]) - subparsers = parser.add_subparsers() - parser_password = subparsers.add_parser('password', help='sets a password for your jupyter server') - parser_password.add_argument("password", help="password to set, if not given, a password will be queried for (NOTE: this may not be safe)", - nargs="?") - parser_password.add_argument("--quiet", help="suppress messages", action="store_true") - parser_password.set_defaults(function=set_password) - args = parser.parse_args(argv[1:]) - args.function(args) - + parser = argparse.ArgumentParser(argv[0]) + subparsers = parser.add_subparsers() + parser_password = subparsers.add_parser( + "password", help="sets a password for your jupyter server" + ) + parser_password.add_argument( + "password", + help="password to set, if not given, a password will be queried for (NOTE: this may not be safe)", + nargs="?", + ) + parser_password.add_argument("--quiet", help="suppress messages", action="store_true") + parser_password.set_defaults(function=set_password) + args = parser.parse_args(argv[1:]) + args.function(args) + + if __name__ == "__main__": - main(sys.argv) + main(sys.argv) diff --git a/jupyter_server/auth/login.py b/jupyter_server/auth/login.py index 30867c516a..b31168ecc0 100644 --- a/jupyter_server/auth/login.py +++ b/jupyter_server/auth/login.py @@ -1,19 +1,16 @@ """Tornado handlers for logging into the Jupyter Server.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - -import re import os +import re import uuid - from urllib.parse import urlparse from tornado.escape import url_escape -from .security import passwd_check, set_password - from ..base.handlers import JupyterHandler +from .security import passwd_check +from .security import set_password class LoginHandler(JupyterHandler): @@ -21,11 +18,15 @@ class LoginHandler(JupyterHandler): authenticates with a hashed password from the configuration. 
""" + def _render(self, message=None): - self.write(self.render_template('login.html', - next=url_escape(self.get_argument('next', default=self.base_url)), + self.write( + self.render_template( + "login.html", + next=url_escape(self.get_argument("next", default=self.base_url)), message=message, - )) + ) + ) def _redirect_safe(self, url, default=None): """Redirect if url is on our PATH @@ -47,7 +48,7 @@ def _redirect_safe(self, url, default=None): # OR pass our cross-origin check if parsed.netloc: # if full URL, run our cross-origin check: - origin = '%s://%s' % (parsed.scheme, parsed.netloc) + origin = "%s://%s" % (parsed.scheme, parsed.netloc) origin = origin.lower() if self.allow_origin: allow = self.allow_origin == origin @@ -61,7 +62,7 @@ def _redirect_safe(self, url, default=None): def get(self): if self.current_user: - next_url = self.get_argument('next', default=self.base_url) + next_url = self.get_argument("next", default=self.base_url) self._redirect_safe(next_url) else: self._render() @@ -74,8 +75,8 @@ def passwd_check(self, a, b): return passwd_check(a, b) def post(self): - typed_password = self.get_argument('password', default=u'') - new_password = self.get_argument('new_password', default=u'') + typed_password = self.get_argument("password", default=u"") + new_password = self.get_argument("new_password", default=u"") if self.get_login_available(self.settings): if self.passwd_check(self.hashed_password, typed_password) and not new_password: @@ -84,34 +85,31 @@ def post(self): self.set_login_cookie(self, uuid.uuid4().hex) if new_password and self.settings.get("allow_password_change"): config_dir = self.settings.get("config_dir") - config_file = os.path.join( - config_dir, "jupyter_server_config.json" - ) + config_file = os.path.join(config_dir, "jupyter_server_config.json") set_password(new_password, config_file=config_file) self.log.info("Wrote hashed password to %s" % config_file) else: self.set_status(401) - self._render(message={'error': 'Invalid credentials'}) + self._render(message={"error": "Invalid credentials"}) return - - next_url = self.get_argument('next', default=self.base_url) + next_url = self.get_argument("next", default=self.base_url) self._redirect_safe(next_url) @classmethod def set_login_cookie(cls, handler, user_id=None): """Call this on handlers to set the login cookie for success""" - cookie_options = handler.settings.get('cookie_options', {}) - cookie_options.setdefault('httponly', True) + cookie_options = handler.settings.get("cookie_options", {}) + cookie_options.setdefault("httponly", True) # tornado <4.2 has a bug that considers secure==True as soon as # 'secure' kwarg is passed to set_secure_cookie - if handler.settings.get('secure_cookie', handler.request.protocol == 'https'): - cookie_options.setdefault('secure', True) - cookie_options.setdefault('path', handler.base_url) + if handler.settings.get("secure_cookie", handler.request.protocol == "https"): + cookie_options.setdefault("secure", True) + cookie_options.setdefault("path", handler.base_url) handler.set_secure_cookie(handler.cookie_name, user_id, **cookie_options) return user_id - auth_header_pat = re.compile('token\s+(.+)', re.IGNORECASE) + auth_header_pat = re.compile("token\s+(.+)", re.IGNORECASE) @classmethod def get_token(cls, handler): @@ -123,10 +121,10 @@ def get_token(cls, handler): - in header: Authorization: token """ - user_token = handler.get_argument('token', '') + user_token = handler.get_argument("token", "") if not user_token: # get it from Authorization header - m = 
cls.auth_header_pat.match(handler.request.headers.get('Authorization', '')) + m = cls.auth_header_pat.match(handler.request.headers.get("Authorization", "")) if m: user_token = m.group(1) return user_token @@ -153,10 +151,10 @@ def is_token_authenticated(cls, handler): - xsrf protection - skip origin-checks for scripts """ - if getattr(handler, '_user_id', None) is None: + if getattr(handler, "_user_id", None) is None: # ensure get_user has been called, so we know if we're token-authenticated handler.get_current_user() - return getattr(handler, '_token_authenticated', False) + return getattr(handler, "_token_authenticated", False) @classmethod def get_user(cls, handler): @@ -166,12 +164,12 @@ def get_user(cls, handler): """ # Can't call this get_current_user because it will collide when # called on LoginHandler itself. - if getattr(handler, '_user_id', None): + if getattr(handler, "_user_id", None): return handler._user_id user_id = cls.get_user_token(handler) if user_id is None: - get_secure_cookie_kwargs = handler.settings.get('get_secure_cookie_kwargs', {}) - user_id = handler.get_secure_cookie(handler.cookie_name, **get_secure_cookie_kwargs ) + get_secure_cookie_kwargs = handler.settings.get("get_secure_cookie_kwargs", {}) + user_id = handler.get_secure_cookie(handler.cookie_name, **get_secure_cookie_kwargs) if user_id: user_id = user_id.decode() else: @@ -189,7 +187,7 @@ def get_user(cls, handler): if not handler.login_available: # Completely insecure! No authentication at all. # No need to warn here, though; validate_security will have already done that. - user_id = 'anonymous' + user_id = "anonymous" # cache value for future retrievals on the same request handler._user_id = user_id @@ -211,7 +209,9 @@ def get_user_token(cls, handler): authenticated = False if user_token == token: # token-authenticated, set the login cookie - handler.log.debug("Accepting token-authenticated connection from %s", handler.request.remote_ip) + handler.log.debug( + "Accepting token-authenticated connection from %s", handler.request.remote_ip + ) authenticated = True if authenticated: @@ -219,7 +219,6 @@ def get_user_token(cls, handler): else: return None - @classmethod def validate_security(cls, app, ssl_options=None): """Check the application's security. @@ -229,16 +228,18 @@ def validate_security(cls, app, ssl_options=None): if not app.ip: warning = "WARNING: The Jupyter server is listening on all IP addresses" if ssl_options is None: - app.log.warning(warning + " and not using encryption. This " - "is not recommended.") + app.log.warning(warning + " and not using encryption. This " "is not recommended.") if not app.password and not app.token: - app.log.warning(warning + " and not using authentication. " - "This is highly insecure and not recommended.") + app.log.warning( + warning + " and not using authentication. " + "This is highly insecure and not recommended." + ) else: if not app.password and not app.token: app.log.warning( "All authentication is disabled." - " Anyone who can connect to this server will be able to run code.") + " Anyone who can connect to this server will be able to run code." + ) @classmethod def password_from_settings(cls, settings): @@ -246,9 +247,9 @@ def password_from_settings(cls, settings): If there is no configured password, an empty string will be returned. 
""" - return settings.get('password', u'') + return settings.get("password", u"") @classmethod def get_login_available(cls, settings): """Whether this LoginHandler is needed - and therefore whether the login page should be displayed.""" - return bool(cls.password_from_settings(settings) or settings.get('token')) + return bool(cls.password_from_settings(settings) or settings.get("token")) diff --git a/jupyter_server/auth/logout.py b/jupyter_server/auth/logout.py index 30e2732c45..1704967ea8 100644 --- a/jupyter_server/auth/logout.py +++ b/jupyter_server/auth/logout.py @@ -1,28 +1,18 @@ """Tornado handlers for logging out of the Jupyter Server. """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - from ..base.handlers import JupyterHandler class LogoutHandler(JupyterHandler): - def get(self): self.clear_login_cookie() if self.login_available: - message = { - 'info': 'Successfully logged out.' - } + message = {"info": "Successfully logged out."} else: - message = { - 'warning': 'Cannot log out. Jupyter Server authentication ' - 'is disabled.' - } - self.write(self.render_template('logout.html', - message=message)) + message = {"warning": "Cannot log out. Jupyter Server authentication " "is disabled."} + self.write(self.render_template("logout.html", message=message)) default_handlers = [(r"/logout", LogoutHandler)] - diff --git a/jupyter_server/auth/security.py b/jupyter_server/auth/security.py index 4e630c4daa..3c6bbad80e 100644 --- a/jupyter_server/auth/security.py +++ b/jupyter_server/auth/security.py @@ -1,8 +1,6 @@ """ Password generation for the Jupyter Server. """ - -from contextlib import contextmanager import getpass import hashlib import io @@ -11,17 +9,22 @@ import random import traceback import warnings +from contextlib import contextmanager -from ipython_genutils.py3compat import cast_bytes, str_to_bytes, cast_unicode -from traitlets.config import Config, ConfigFileNotFound, JSONFileConfigLoader +from ipython_genutils.py3compat import cast_bytes +from ipython_genutils.py3compat import cast_unicode +from ipython_genutils.py3compat import str_to_bytes from jupyter_core.paths import jupyter_config_dir +from traitlets.config import Config +from traitlets.config import ConfigFileNotFound +from traitlets.config import JSONFileConfigLoader # Length of the salt in nr of hex chars, which implies salt_len * 4 # bits of randomness. salt_len = 12 -def passwd(passphrase=None, algorithm='argon2'): +def passwd(passphrase=None, algorithm="argon2"): """Generate hashed password and salt for use in server configuration. In the server configuration, set `c.ServerApp.password` to @@ -49,18 +52,19 @@ def passwd(passphrase=None, algorithm='argon2'): """ if passphrase is None: for i in range(3): - p0 = getpass.getpass('Enter password: ') - p1 = getpass.getpass('Verify password: ') + p0 = getpass.getpass("Enter password: ") + p1 = getpass.getpass("Verify password: ") if p0 == p1: passphrase = p0 break else: - print('Passwords do not match.') + print("Passwords do not match.") else: - raise ValueError('No matching passwords found. Giving up.') + raise ValueError("No matching passwords found. 
Giving up.") - if algorithm == 'argon2': + if algorithm == "argon2": import argon2 + ph = argon2.PasswordHasher( memory_cost=10240, time_cost=10, @@ -68,13 +72,13 @@ def passwd(passphrase=None, algorithm='argon2'): ) h = ph.hash(passphrase) - return ':'.join((algorithm, cast_unicode(h, 'ascii'))) + return ":".join((algorithm, cast_unicode(h, "ascii"))) h = hashlib.new(algorithm) - salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len) - h.update(cast_bytes(passphrase, 'utf-8') + str_to_bytes(salt, 'ascii')) + salt = ("%0" + str(salt_len) + "x") % random.getrandbits(4 * salt_len) + h.update(cast_bytes(passphrase, "utf-8") + str_to_bytes(salt, "ascii")) - return ':'.join((algorithm, salt, h.hexdigest())) + return ":".join((algorithm, salt, h.hexdigest())) def passwd_check(hashed_passphrase, passphrase): @@ -105,9 +109,10 @@ def passwd_check(hashed_passphrase, passphrase): ... 'mypassword') True """ - if hashed_passphrase.startswith('argon2:'): + if hashed_passphrase.startswith("argon2:"): import argon2 import argon2.exceptions + ph = argon2.PasswordHasher() try: @@ -116,7 +121,7 @@ def passwd_check(hashed_passphrase, passphrase): return False try: - algorithm, salt, pw_digest = hashed_passphrase.split(':', 2) + algorithm, salt, pw_digest = hashed_passphrase.split(":", 2) except (ValueError, TypeError): return False @@ -128,10 +133,11 @@ def passwd_check(hashed_passphrase, passphrase): if len(pw_digest) == 0: return False - h.update(cast_bytes(passphrase, 'utf-8') + cast_bytes(salt, 'ascii')) + h.update(cast_bytes(passphrase, "utf-8") + cast_bytes(salt, "ascii")) return h.hexdigest() == pw_digest + @contextmanager def persist_config(config_file=None, mode=0o600): """Context manager that can be used to modify a config object @@ -141,7 +147,7 @@ def persist_config(config_file=None, mode=0o600): """ if config_file is None: - config_file = os.path.join(jupyter_config_dir(), 'jupyter_server_config.json') + config_file = os.path.join(jupyter_config_dir(), "jupyter_server_config.json") os.makedirs(os.path.dirname(config_file), exist_ok=True) @@ -153,15 +159,15 @@ def persist_config(config_file=None, mode=0o600): yield config - with io.open(config_file, 'w', encoding='utf8') as f: + with io.open(config_file, "w", encoding="utf8") as f: f.write(cast_unicode(json.dumps(config, indent=2))) try: os.chmod(config_file, mode) except Exception as e: tb = traceback.format_exc() - warnings.warn("Failed to set permissions on %s:\n%s" % (config_file, tb), - RuntimeWarning) + warnings.warn("Failed to set permissions on %s:\n%s" % (config_file, tb), RuntimeWarning) + def set_password(password=None, config_file=None): """Ask user for password, store it in JSON configuration file""" diff --git a/jupyter_server/base/handlers.py b/jupyter_server/base/handlers.py index 6877ea7284..a514c250da 100644 --- a/jupyter_server/base/handlers.py +++ b/jupyter_server/base/handlers.py @@ -1,8 +1,6 @@ """Base Tornado handlers for the Jupyter server.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- import datetime import functools import ipaddress @@ -16,41 +14,50 @@ from http.client import responses from http.cookies import Morsel from urllib.parse import urlparse + +import prometheus_client +from ipython_genutils.path import filefind from jinja2 import TemplateNotFound -from tornado import web, escape, httputil +from jupyter_core.paths import is_hidden +from tornado import escape +from tornado import httputil +from tornado import web from tornado.log import app_log -import prometheus_client - -from jupyter_server._sysinfo import get_sys_info - from traitlets.config import Application -from ipython_genutils.path import filefind -from jupyter_core.paths import is_hidden import jupyter_server +from jupyter_server._sysinfo import get_sys_info from jupyter_server._tz import utcnow from jupyter_server.i18n import combine_translations -from jupyter_server.utils import ensure_async, url_path_join, url_is_absolute, url_escape, urldecode_unix_socket_path from jupyter_server.services.security import csp_report_uri +from jupyter_server.utils import ensure_async +from jupyter_server.utils import url_escape +from jupyter_server.utils import url_is_absolute +from jupyter_server.utils import url_path_join +from jupyter_server.utils import urldecode_unix_socket_path -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Top-level handlers -#----------------------------------------------------------------------------- -non_alphanum = re.compile(r'[^A-Za-z0-9]') +# ----------------------------------------------------------------------------- +non_alphanum = re.compile(r"[^A-Za-z0-9]") _sys_info_cache = None + + def json_sys_info(): global _sys_info_cache if _sys_info_cache is None: _sys_info_cache = json.dumps(get_sys_info()) return _sys_info_cache + def log(): if Application.initialized(): return Application.instance().log else: return app_log + class AuthenticatedHandler(web.RequestHandler): """A RequestHandler with an authenticated user.""" @@ -60,20 +67,23 @@ def content_security_policy(self): Can be overridden by defining Content-Security-Policy in settings['headers'] """ - if 'Content-Security-Policy' in self.settings.get('headers', {}): + if "Content-Security-Policy" in self.settings.get("headers", {}): # user-specified, don't override - return self.settings['headers']['Content-Security-Policy'] - - return '; '.join([ - "frame-ancestors 'self'", - # Make sure the report-uri is relative to the base_url - "report-uri " + self.settings.get('csp_report_uri', url_path_join(self.base_url, csp_report_uri)), - ]) + return self.settings["headers"]["Content-Security-Policy"] + + return "; ".join( + [ + "frame-ancestors 'self'", + # Make sure the report-uri is relative to the base_url + "report-uri " + + self.settings.get("csp_report_uri", url_path_join(self.base_url, csp_report_uri)), + ] + ) def set_default_headers(self): headers = {} headers["X-Content-Type-Options"] = "nosniff" - headers.update(self.settings.get('headers', {})) + headers.update(self.settings.get("headers", {})) headers["Content-Security-Policy"] = self.content_security_policy @@ -105,18 +115,18 @@ def force_clear_cookie(self, name, path="/", domain=None): expires = datetime.datetime.utcnow() - datetime.timedelta(days=365) morsel = Morsel() - morsel.set(name, '', '""') - morsel['expires'] = httputil.format_timestamp(expires) - morsel['path'] = path + morsel.set(name, "", '""') + morsel["expires"] = 
httputil.format_timestamp(expires) + morsel["path"] = path if domain: - morsel['domain'] = domain + morsel["domain"] = domain self.add_header("Set-Cookie", morsel.OutputString()) def clear_login_cookie(self): - cookie_options = self.settings.get('cookie_options', {}) - path = cookie_options.setdefault('path', self.base_url) + cookie_options = self.settings.get("cookie_options", {}) + path = cookie_options.setdefault("path", self.base_url) self.clear_cookie(self.cookie_name, path=path) - if path and path != '/': + if path and path != "/": # also clear cookie on / to ensure old cookies are cleared # after the change in path behavior. # N.B. This bypasses the normal cookie handling, which can't update @@ -125,7 +135,7 @@ def clear_login_cookie(self): def get_current_user(self): if self.login_handler is None: - return 'anonymous' + return "anonymous" return self.login_handler.get_user(self) def skip_check_origin(self): @@ -134,42 +144,40 @@ def skip_check_origin(self): For example: in the default LoginHandler, if a request is token-authenticated, origin checking should be skipped. """ - if self.request.method == 'OPTIONS': + if self.request.method == "OPTIONS": # no origin-check on options requests, which are used to check origins! return True - if self.login_handler is None or not hasattr(self.login_handler, 'should_check_origin'): + if self.login_handler is None or not hasattr(self.login_handler, "should_check_origin"): return False return not self.login_handler.should_check_origin(self) @property def token_authenticated(self): """Have I been authenticated with a token?""" - if self.login_handler is None or not hasattr(self.login_handler, 'is_token_authenticated'): + if self.login_handler is None or not hasattr(self.login_handler, "is_token_authenticated"): return False return self.login_handler.is_token_authenticated(self) @property def cookie_name(self): - default_cookie_name = non_alphanum.sub('-', 'username-{}'.format( - self.request.host - )) - return self.settings.get('cookie_name', default_cookie_name) + default_cookie_name = non_alphanum.sub("-", "username-{}".format(self.request.host)) + return self.settings.get("cookie_name", default_cookie_name) @property def logged_in(self): """Is a user currently logged in?""" user = self.get_current_user() - return (user and not user == 'anonymous') + return user and not user == "anonymous" @property def login_handler(self): """Return the login handler for this application, if any.""" - return self.settings.get('login_handler_class', None) + return self.settings.get("login_handler_class", None) @property def token(self): """Return the login token for this application, if any.""" - return self.settings.get('token', None) + return self.settings.get("token", None) @property def login_available(self): @@ -192,7 +200,7 @@ class JupyterHandler(AuthenticatedHandler): @property def config(self): - return self.settings.get('config', None) + return self.settings.get("config", None) @property def log(self): @@ -202,92 +210,93 @@ def log(self): @property def jinja_template_vars(self): """User-supplied values to supply to jinja templates.""" - return self.settings.get('jinja_template_vars', {}) + return self.settings.get("jinja_template_vars", {}) - #--------------------------------------------------------------- + # --------------------------------------------------------------- # URLs - #--------------------------------------------------------------- + # --------------------------------------------------------------- @property def version_hash(self): 
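The cookie_name property reformatted above derives a per-host cookie name by replacing every non-alphanumeric character of "username-{host}" with a dash. A quick illustration with a hypothetical host value:

import re

non_alphanum = re.compile(r"[^A-Za-z0-9]")

def default_cookie_name(host):
    # same substitution the property applies to self.request.host
    return non_alphanum.sub("-", "username-{}".format(host))

print(default_cookie_name("localhost:8888"))  # username-localhost-8888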
"""The version hash to use for cache hints for static files""" - return self.settings.get('version_hash', '') + return self.settings.get("version_hash", "") @property def mathjax_url(self): - url = self.settings.get('mathjax_url', '') + url = self.settings.get("mathjax_url", "") if not url or url_is_absolute(url): return url return url_path_join(self.base_url, url) @property def mathjax_config(self): - return self.settings.get('mathjax_config', 'TeX-AMS-MML_HTMLorMML-full,Safe') + return self.settings.get("mathjax_config", "TeX-AMS-MML_HTMLorMML-full,Safe") @property def base_url(self): - return self.settings.get('base_url', '/') + return self.settings.get("base_url", "/") @property def default_url(self): - return self.settings.get('default_url', '') + return self.settings.get("default_url", "") @property def ws_url(self): - return self.settings.get('websocket_url', '') + return self.settings.get("websocket_url", "") @property def contents_js_source(self): - self.log.debug("Using contents: %s", self.settings.get('contents_js_source', - 'services/contents')) - return self.settings.get('contents_js_source', 'services/contents') + self.log.debug( + "Using contents: %s", self.settings.get("contents_js_source", "services/contents") + ) + return self.settings.get("contents_js_source", "services/contents") - #--------------------------------------------------------------- + # --------------------------------------------------------------- # Manager objects - #--------------------------------------------------------------- + # --------------------------------------------------------------- @property def kernel_manager(self): - return self.settings['kernel_manager'] + return self.settings["kernel_manager"] @property def contents_manager(self): - return self.settings['contents_manager'] + return self.settings["contents_manager"] @property def session_manager(self): - return self.settings['session_manager'] + return self.settings["session_manager"] @property def terminal_manager(self): - return self.settings['terminal_manager'] + return self.settings["terminal_manager"] @property def kernel_spec_manager(self): - return self.settings['kernel_spec_manager'] + return self.settings["kernel_spec_manager"] @property def config_manager(self): - return self.settings['config_manager'] + return self.settings["config_manager"] - #--------------------------------------------------------------- + # --------------------------------------------------------------- # CORS - #--------------------------------------------------------------- + # --------------------------------------------------------------- @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" - return self.settings.get('allow_origin', '') + return self.settings.get("allow_origin", "") @property def allow_origin_pat(self): """Regular expression version of allow_origin""" - return self.settings.get('allow_origin_pat', None) + return self.settings.get("allow_origin_pat", None) @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" - return self.settings.get('allow_credentials', False) + return self.settings.get("allow_credentials", False) def set_default_headers(self): """Add CORS headers, if defined""" @@ -298,18 +307,15 @@ def set_default_headers(self): origin = self.get_origin() if origin and self.allow_origin_pat.match(origin): self.set_header("Access-Control-Allow-Origin", origin) - elif ( - self.token_authenticated - and "Access-Control-Allow-Origin" not in - self.settings.get('headers', {}) 
+ elif self.token_authenticated and "Access-Control-Allow-Origin" not in self.settings.get( + "headers", {} ): # allow token-authenticated requests cross-origin by default. # only apply this exception if allow-origin has not been specified. - self.set_header('Access-Control-Allow-Origin', - self.request.headers.get('Origin', '')) + self.set_header("Access-Control-Allow-Origin", self.request.headers.get("Origin", "")) if self.allow_credentials: - self.set_header("Access-Control-Allow-Credentials", 'true') + self.set_header("Access-Control-Allow-Credentials", "true") def set_attachment_header(self, filename): """Set Content-Disposition: attachment header @@ -317,12 +323,12 @@ def set_attachment_header(self, filename): As a method to ensure handling of filename encoding """ escaped_filename = url_escape(filename) - self.set_header('Content-Disposition', - 'attachment;' - " filename*=utf-8''{utf8}" - .format( + self.set_header( + "Content-Disposition", + "attachment;" + " filename*=utf-8''{utf8}".format( utf8=escaped_filename, - ) + ), ) def get_origin(self): @@ -346,7 +352,7 @@ def check_origin(self, origin_to_satisfy_tornado=""): - allow unspecified host/origin (e.g. scripts) - allow token-authenticated requests """ - if self.allow_origin == '*' or self.skip_check_origin(): + if self.allow_origin == "*" or self.skip_check_origin(): return True host = self.request.headers.get("Host") @@ -377,8 +383,11 @@ def check_origin(self, origin_to_satisfy_tornado=""): # No CORS headers deny the request allow = False if not allow: - self.log.warning("Blocking Cross Origin API request for %s. Origin: %s, Host: %s", - self.request.path, origin, host, + self.log.warning( + "Blocking Cross Origin API request for %s. Origin: %s, Host: %s", + self.request.path, + origin, + host, ) return allow @@ -420,24 +429,27 @@ def check_referer(self): allow = False if not allow: - self.log.warning("Blocking Cross Origin request for %s. Referer: %s, Host: %s", - self.request.path, origin, host, + self.log.warning( + "Blocking Cross Origin request for %s. Referer: %s, Host: %s", + self.request.path, + origin, + host, ) return allow def check_xsrf_cookie(self): """Bypass xsrf cookie checks when token-authenticated""" - if self.token_authenticated or self.settings.get('disable_check_xsrf', False): + if self.token_authenticated or self.settings.get("disable_check_xsrf", False): # Token-authenticated requests do not need additional XSRF-check # Servers without authentication are vulnerable to XSRF return try: return super(JupyterHandler, self).check_xsrf_cookie() except web.HTTPError as e: - if self.request.method in {'GET', 'HEAD'}: + if self.request.method in {"GET", "HEAD"}: # Consider Referer a sufficient cross-origin check for GET requests if not self.check_referer(): - referer = self.request.headers.get('Referer') + referer = self.request.headers.get("Referer") if referer: msg = "Blocking Cross Origin request from {}.".format(referer) else: @@ -451,35 +463,38 @@ def check_host(self): Returns True if the request should continue, False otherwise. """ - if self.settings.get('allow_remote_access', False): + if self.settings.get("allow_remote_access", False): return True # Remove port (e.g. 
':8888') from host - host = re.match(r'^(.*?)(:\d+)?$', self.request.host).group(1) + host = re.match(r"^(.*?)(:\d+)?$", self.request.host).group(1) # Browsers format IPv6 addresses like [::1]; we need to remove the [] - if host.startswith('[') and host.endswith(']'): + if host.startswith("[") and host.endswith("]"): host = host[1:-1] # UNIX socket handling check_host = urldecode_unix_socket_path(host) - if check_host.startswith('/') and os.path.exists(check_host): + if check_host.startswith("/") and os.path.exists(check_host): allow = True else: try: addr = ipaddress.ip_address(host) except ValueError: # Not an IP address: check against hostnames - allow = host in self.settings.get('local_hostnames', ['localhost']) + allow = host in self.settings.get("local_hostnames", ["localhost"]) else: allow = addr.is_loopback if not allow: self.log.warning( - ("Blocking request with non-local 'Host' %s (%s). " - "If the server should be accessible at that name, " - "set ServerApp.allow_remote_access to disable the check."), - host, self.request.host + ( + "Blocking request with non-local 'Host' %s (%s). " + "If the server should be accessible at that name, " + "set ServerApp.allow_remote_access to disable the check." + ), + host, + self.request.host, ) return allow @@ -488,13 +503,13 @@ def prepare(self): raise web.HTTPError(403) return super(JupyterHandler, self).prepare() - #--------------------------------------------------------------- + # --------------------------------------------------------------- # template rendering - #--------------------------------------------------------------- + # --------------------------------------------------------------- def get_template(self, name): """Return the jinja template object for a given name""" - return self.settings['jinja2_env'].get_template(name) + return self.settings["jinja2_env"].get_template(name) def render_template(self, name, **ns): ns.update(self.template_namespace) @@ -508,7 +523,7 @@ def template_namespace(self): default_url=self.default_url, ws_url=self.ws_url, logged_in=self.logged_in, - allow_password_change=self.settings.get('allow_password_change'), + allow_password_change=self.settings.get("allow_password_change"), login_available=self.login_available, token_available=bool(self.token), static_url=self.static_url, @@ -517,9 +532,10 @@ def template_namespace(self): version_hash=self.version_hash, xsrf_form_html=self.xsrf_form_html, token=self.token, - xsrf_token=self.xsrf_token.decode('utf8'), - nbjs_translations=json.dumps(combine_translations( - self.request.headers.get('Accept-Language', ''))), + xsrf_token=self.xsrf_token.decode("utf8"), + nbjs_translations=json.dumps( + combine_translations(self.request.headers.get("Accept-Language", "")) + ), **self.jinja_template_vars ) @@ -528,21 +544,21 @@ def get_json_body(self): if not self.request.body: return None # Do we need to call body.decode('utf-8') here? 
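check_host, reformatted above, boils down to: strip the port, unwrap IPv6 brackets, then accept loopback addresses or configured local hostnames. A standalone sketch of that decision under those assumptions (the unix-socket branch is omitted here):

import ipaddress
import re

def is_local_host(request_host, local_hostnames=("localhost",)):
    host = re.match(r"^(.*?)(:\d+)?$", request_host).group(1)  # drop ':8888'
    if host.startswith("[") and host.endswith("]"):  # browsers send [::1]
        host = host[1:-1]
    try:
        return ipaddress.ip_address(host).is_loopback
    except ValueError:
        # not an IP address: fall back to the hostname whitelist
        return host in local_hostnames

assert is_local_host("localhost:8888")
assert is_local_host("[::1]:8888")
assert not is_local_host("example.com")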
- body = self.request.body.strip().decode(u'utf-8') + body = self.request.body.strip().decode(u"utf-8") try: model = json.loads(body) except Exception as e: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) - raise web.HTTPError(400, u'Invalid JSON in body of request') from e + raise web.HTTPError(400, u"Invalid JSON in body of request") from e return model def write_error(self, status_code, **kwargs): """render custom error pages""" - exc_info = kwargs.get('exc_info') - message = '' - status_message = responses.get(status_code, 'Unknown HTTP Error') - exception = '(unknown)' + exc_info = kwargs.get("exc_info") + message = "" + status_message = responses.get(status_code, "Unknown HTTP Error") + exception = "(unknown)" if exc_info: exception = exc_info[1] # get the custom message, if defined @@ -552,7 +568,7 @@ def write_error(self, status_code, **kwargs): pass # construct the custom reason, if defined - reason = getattr(exception, 'reason', '') + reason = getattr(exception, "reason", "") if reason: status_message = reason @@ -564,12 +580,12 @@ def write_error(self, status_code, **kwargs): exception=exception, ) - self.set_header('Content-Type', 'text/html') + self.set_header("Content-Type", "text/html") # render the template try: - html = self.render_template('%s.html' % status_code, **ns) + html = self.render_template("%s.html" % status_code, **ns) except TemplateNotFound: - html = self.render_template('error.html', **ns) + html = self.render_template("error.html", **ns) self.write(html) @@ -584,28 +600,28 @@ def prepare(self): def write_error(self, status_code, **kwargs): """APIHandler errors are JSON, not human pages""" - self.set_header('Content-Type', 'application/json') - message = responses.get(status_code, 'Unknown HTTP Error') + self.set_header("Content-Type", "application/json") + message = responses.get(status_code, "Unknown HTTP Error") reply = { - 'message': message, + "message": message, } - exc_info = kwargs.get('exc_info') + exc_info = kwargs.get("exc_info") if exc_info: e = exc_info[1] if isinstance(e, HTTPError): - reply['message'] = e.log_message or message - reply['reason'] = e.reason + reply["message"] = e.log_message or message + reply["reason"] = e.reason else: - reply['message'] = 'Unhandled error' - reply['reason'] = None - reply['traceback'] = ''.join(traceback.format_exception(*exc_info)) - self.log.warning(reply['message']) + reply["message"] = "Unhandled error" + reply["reason"] = None + reply["traceback"] = "".join(traceback.format_exception(*exc_info)) + self.log.warning(reply["message"]) self.finish(json.dumps(reply)) def get_current_user(self): """Raise 403 on API handlers instead of redirecting to human login page""" # preserve _user_cache so we don't raise more than once - if hasattr(self, '_user_cache'): + if hasattr(self, "_user_cache"): return self._user_cache self._user_cache = user = super(APIHandler, self).get_current_user() return user @@ -620,10 +636,12 @@ def get_login_url(self): @property def content_security_policy(self): - csp = '; '.join([ + csp = "; ".join( + [ super(APIHandler, self).content_security_policy, "default-src 'none'", - ]) + ] + ) return csp # set _track_activity = False on API handlers that shouldn't track activity @@ -634,24 +652,27 @@ def update_api_activity(self): # record activity of authenticated requests if ( self._track_activity - and getattr(self, '_user_cache', None) - and self.get_argument('no_track_activity', None) is None + and getattr(self, "_user_cache", None) + and 
self.get_argument("no_track_activity", None) is None ): - self.settings['api_last_activity'] = utcnow() + self.settings["api_last_activity"] = utcnow() def finish(self, *args, **kwargs): self.update_api_activity() - self.set_header('Content-Type', 'application/json') + self.set_header("Content-Type", "application/json") return super(APIHandler, self).finish(*args, **kwargs) def options(self, *args, **kwargs): - if 'Access-Control-Allow-Headers' in self.settings.get('headers', {}): - self.set_header('Access-Control-Allow-Headers', self.settings['headers']['Access-Control-Allow-Headers']) + if "Access-Control-Allow-Headers" in self.settings.get("headers", {}): + self.set_header( + "Access-Control-Allow-Headers", + self.settings["headers"]["Access-Control-Allow-Headers"], + ) else: - self.set_header('Access-Control-Allow-Headers', - 'accept, content-type, authorization, x-xsrftoken') - self.set_header('Access-Control-Allow-Methods', - 'GET, PUT, POST, PATCH, DELETE, OPTIONS') + self.set_header( + "Access-Control-Allow-Headers", "accept, content-type, authorization, x-xsrftoken" + ) + self.set_header("Access-Control-Allow-Methods", "GET, PUT, POST, PATCH, DELETE, OPTIONS") # if authorization header is requested, # that means the request is token-authenticated. @@ -659,25 +680,29 @@ def options(self, *args, **kwargs): # only allow this exception if allow_origin has not been specified # and Jupyter server authentication is enabled. # If the token is not valid, the 'real' request will still be rejected. - requested_headers = self.request.headers.get('Access-Control-Request-Headers', '').split(',') - if requested_headers and any( - h.strip().lower() == 'authorization' - for h in requested_headers - ) and ( - # FIXME: it would be even better to check specifically for token-auth, - # but there is currently no API for this. - self.login_available - ) and ( - self.allow_origin - or self.allow_origin_pat - or 'Access-Control-Allow-Origin' in self.settings.get('headers', {}) + requested_headers = self.request.headers.get("Access-Control-Request-Headers", "").split( + "," + ) + if ( + requested_headers + and any(h.strip().lower() == "authorization" for h in requested_headers) + and ( + # FIXME: it would be even better to check specifically for token-auth, + # but there is currently no API for this. + self.login_available + ) + and ( + self.allow_origin + or self.allow_origin_pat + or "Access-Control-Allow-Origin" in self.settings.get("headers", {}) + ) ): - self.set_header('Access-Control-Allow-Origin', - self.request.headers.get('Origin', '')) + self.set_header("Access-Control-Allow-Origin", self.request.headers.get("Origin", "")) class Template404(JupyterHandler): """Render our 404 template""" + def prepare(self): raise web.HTTPError(404) @@ -689,8 +714,10 @@ class AuthenticatedFileHandler(JupyterHandler, web.StaticFileHandler): def content_security_policy(self): # In case we're serving HTML/SVG, confine any Javascript to a unique # origin so it can't interact with the Jupyter server. 
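The OPTIONS preflight logic reformatted above only echoes the request's Origin when the preflight asks for an authorization header, login is enabled, and some allow-origin configuration exists. A condensed sketch of that predicate; the function name and flat arguments are illustrative, not the handler's API:

def allow_preflight_origin(requested_headers, login_available,
                           allow_origin, allow_origin_pat, configured_headers):
    # requested_headers is the raw Access-Control-Request-Headers value
    wants_auth = any(
        h.strip().lower() == "authorization" for h in requested_headers.split(",")
    )
    has_origin_config = bool(
        allow_origin
        or allow_origin_pat
        or "Access-Control-Allow-Origin" in configured_headers
    )
    return wants_auth and login_available and has_origin_config

assert allow_preflight_origin("accept,authorization", True, "https://app.example", None, {})
assert not allow_preflight_origin("accept", True, "*", None, {})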
- return super(AuthenticatedFileHandler, self).content_security_policy + \ - "; sandbox allow-scripts" + return ( + super(AuthenticatedFileHandler, self).content_security_policy + + "; sandbox allow-scripts" + ) @web.authenticated def head(self, path): @@ -699,24 +726,24 @@ def head(self, path): @web.authenticated def get(self, path): - if os.path.splitext(path)[1] == '.ipynb' or self.get_argument("download", False): - name = path.rsplit('/', 1)[-1] + if os.path.splitext(path)[1] == ".ipynb" or self.get_argument("download", False): + name = path.rsplit("/", 1)[-1] self.set_attachment_header(name) return web.StaticFileHandler.get(self, path) def get_content_type(self): - path = self.absolute_path.strip('/') - if '/' in path: - _, name = path.rsplit('/', 1) + path = self.absolute_path.strip("/") + if "/" in path: + _, name = path.rsplit("/", 1) else: name = path - if name.endswith('.ipynb'): - return 'application/x-ipynb+json' + if name.endswith(".ipynb"): + return "application/x-ipynb+json" else: cur_mime = mimetypes.guess_type(name)[0] - if cur_mime == 'text/plain': - return 'text/plain; charset=UTF-8' + if cur_mime == "text/plain": + return "text/plain; charset=UTF-8" else: return super(AuthenticatedFileHandler, self).get_content_type() @@ -739,10 +766,13 @@ def validate_absolute_path(self, root, absolute_path): abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path) abs_root = os.path.abspath(root) if is_hidden(abs_path, abs_root) and not self.contents_manager.allow_hidden: - self.log.info("Refusing to serve hidden file, via 404 Error, use flag 'ContentsManager.allow_hidden' to enable") + self.log.info( + "Refusing to serve hidden file, via 404 Error, use flag 'ContentsManager.allow_hidden' to enable" + ) raise web.HTTPError(404) return abs_path + def json_errors(method): """Decorate methods with this to return GitHub style JSON errors. @@ -755,23 +785,28 @@ def json_errors(method): 2. Create and return a JSON body with a message field describing the error in a human readable form. """ - warnings.warn('@json_errors is deprecated in notebook 5.2.0. Subclass APIHandler instead.', + warnings.warn( + "@json_errors is deprecated in notebook 5.2.0. 
Subclass APIHandler instead.", DeprecationWarning, stacklevel=2, ) + @functools.wraps(method) def wrapper(self, *args, **kwargs): self.write_error = types.MethodType(APIHandler.write_error, self) return method(self, *args, **kwargs) + return wrapper -#----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- # File handler -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # to minimize subclass changes: HTTPError = web.HTTPError + class FileFindHandler(JupyterHandler, web.StaticFileHandler): """subclass of StaticFileHandler for serving files from a search path""" @@ -781,8 +816,9 @@ class FileFindHandler(JupyterHandler, web.StaticFileHandler): def set_headers(self): super(FileFindHandler, self).set_headers() # disable browser caching, rely on 304 replies for savings - if "v" not in self.request.arguments or \ - any(self.request.path.startswith(path) for path in self.no_cache_paths): + if "v" not in self.request.arguments or any( + self.request.path.startswith(path) for path in self.no_cache_paths + ): self.set_header("Cache-Control", "no-cache") def initialize(self, path, default_filename=None, no_cache_paths=None): @@ -791,9 +827,7 @@ def initialize(self, path, default_filename=None, no_cache_paths=None): if isinstance(path, str): path = [path] - self.root = tuple( - os.path.abspath(os.path.expanduser(p)) + os.sep for p in path - ) + self.root = tuple(os.path.abspath(os.path.expanduser(p)) + os.sep for p in path) self.default_filename = default_filename def compute_etag(self): @@ -809,16 +843,16 @@ def get_absolute_path(cls, roots, path): abspath = os.path.abspath(filefind(path, roots)) except IOError: # IOError means not found - return '' + return "" cls._static_paths[path] = abspath - log().debug("Path %s served from %s"%(path, abspath)) + log().debug("Path %s served from %s" % (path, abspath)) return abspath def validate_absolute_path(self, root, absolute_path): """check if the file should be served (raises 404, 403, etc.)""" - if absolute_path == '': + if absolute_path == "": raise web.HTTPError(404) for root in self.root: @@ -829,7 +863,6 @@ def validate_absolute_path(self, root, absolute_path): class APIVersionHandler(APIHandler): - def get(self): # not authenticated, so give as few info as possible self.finish(json.dumps({"version": jupyter_server.__version__})) @@ -874,68 +907,71 @@ async def redirect_to_files(self, path): cm = self.contents_manager if await ensure_async(cm.dir_exists(path)): # it's a *directory*, redirect to /tree - url = url_path_join(self.base_url, 'tree', url_escape(path)) + url = url_path_join(self.base_url, "tree", url_escape(path)) else: orig_path = path # otherwise, redirect to /files - parts = path.split('/') + parts = path.split("/") - if not await ensure_async(cm.file_exists(path=path)) and 'files' in parts: + if not await ensure_async(cm.file_exists(path=path)) and "files" in parts: # redirect without files/ iff it would 404 # this preserves pre-2.0-style 'files/' links self.log.warning("Deprecated files/ URL: %s", orig_path) - parts.remove('files') - path = '/'.join(parts) + parts.remove("files") + path = "/".join(parts) if not await ensure_async(cm.file_exists(path=path)): raise web.HTTPError(404) - url = url_path_join(self.base_url, 'files', url_escape(path)) + url = url_path_join(self.base_url, "files", url_escape(path)) 
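FileFindHandler above resolves a relative path against a tuple of expanded roots and caches the hit. A rough stdlib-only equivalent of that lookup, with a plain os.path scan standing in for ipython_genutils' filefind:

import os

_static_cache = {}

def find_static(path, roots):
    # return the first root that actually contains the file, caching hits
    if path in _static_cache:
        return _static_cache[path]
    for root in roots:
        candidate = os.path.abspath(os.path.join(os.path.expanduser(root), path))
        if os.path.isfile(candidate):
            _static_cache[path] = candidate
            return candidate
    return ""  # mirrors the handler returning '' when nothing is found

print(find_static("os.py", [os.path.dirname(os.__file__)]))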
         self.log.debug("Redirecting %s to %s", self.request.path, url)
         self.redirect(url)

-    def get(self, path=''):
+    def get(self, path=""):
         return self.redirect_to_files(self, path)


 class RedirectWithParams(web.RequestHandler):
     """Same as web.RedirectHandler, but preserves URL parameters"""
+
     def initialize(self, url, permanent=True):
         self._url = url
         self._permanent = permanent

     def get(self):
-        sep = '&' if '?' in self._url else '?'
+        sep = "&" if "?" in self._url else "?"
         url = sep.join([self._url, self.request.query])
         self.redirect(url, permanent=self._permanent)

+
 class PrometheusMetricsHandler(JupyterHandler):
     """
     Return prometheus metrics for this notebook server
     """
+
     def get(self):
-        if self.settings['authenticate_prometheus'] and not self.logged_in:
+        if self.settings["authenticate_prometheus"] and not self.logged_in:
             raise web.HTTPError(403)

-        self.set_header('Content-Type', prometheus_client.CONTENT_TYPE_LATEST)
+        self.set_header("Content-Type", prometheus_client.CONTENT_TYPE_LATEST)
         self.write(prometheus_client.generate_latest(prometheus_client.REGISTRY))


-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
 # URL pattern fragments for re-use
-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------

 # path matches any number of `/foo[/bar...]` or just `/` or ''
 path_regex = r"(?P<path>(?:(?:/[^/]+)+|/?))"

-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
 # URL to handler mappings
-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------

 default_handlers = [
     (r".*/", TrailingSlashHandler),
     (r"api", APIVersionHandler),
-    (r'/(robots\.txt|favicon\.ico)', web.StaticFileHandler),
-    (r'/metrics', PrometheusMetricsHandler)
+    (r"/(robots\.txt|favicon\.ico)", web.StaticFileHandler),
+    (r"/metrics", PrometheusMetricsHandler),
 ]
diff --git a/jupyter_server/base/zmqhandlers.py b/jupyter_server/base/zmqhandlers.py
index 839d4f38eb..b7571f97d0 100644
--- a/jupyter_server/base/zmqhandlers.py
+++ b/jupyter_server/base/zmqhandlers.py
@@ -1,21 +1,20 @@
 # coding: utf-8
 """Tornado handlers for WebSocket <-> ZMQ sockets."""
-
 # Copyright (c) Jupyter Development Team.
 # Distributed under the terms of the Modified BSD License.
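RedirectWithParams above differs from tornado's stock RedirectHandler only in carrying the original query string across the redirect. The core of it is a one-liner worth spelling out as a sketch:

def redirect_url(target, query):
    # append the original query string, choosing '?' or '&' as needed
    sep = "&" if "?" in target else "?"
    return sep.join([target, query])

assert redirect_url("/tree", "token=abc") == "/tree?token=abc"
assert redirect_url("/tree?a=1", "token=abc") == "/tree?a=1&token=abc"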
- import json import struct import sys -import tornado - from urllib.parse import urlparse -from tornado import ioloop, web -from tornado.websocket import WebSocketHandler -from jupyter_client.session import Session -from jupyter_client.jsonutil import date_default, extract_dates +import tornado from ipython_genutils.py3compat import cast_unicode +from jupyter_client.jsonutil import date_default +from jupyter_client.jsonutil import extract_dates +from jupyter_client.session import Session +from tornado import ioloop +from tornado import web +from tornado.websocket import WebSocketHandler from .handlers import JupyterHandler @@ -37,18 +36,18 @@ def serialize_binary_message(msg): """ # don't modify msg or buffer list in-place msg = msg.copy() - buffers = list(msg.pop('buffers')) + buffers = list(msg.pop("buffers")) if sys.version_info < (3, 4): buffers = [x.tobytes() for x in buffers] - bmsg = json.dumps(msg, default=date_default).encode('utf8') + bmsg = json.dumps(msg, default=date_default).encode("utf8") buffers.insert(0, bmsg) nbufs = len(buffers) offsets = [4 * (nbufs + 1)] for buf in buffers[:-1]: offsets.append(offsets[-1] + len(buf)) - offsets_buf = struct.pack('!' + 'I' * (nbufs + 1), nbufs, *offsets) + offsets_buf = struct.pack("!" + "I" * (nbufs + 1), nbufs, *offsets) buffers.insert(0, offsets_buf) - return b''.join(buffers) + return b"".join(buffers) def deserialize_binary_message(bmsg): @@ -65,24 +64,26 @@ def deserialize_binary_message(bmsg): ------- message dictionary """ - nbufs = struct.unpack('!i', bmsg[:4])[0] - offsets = list(struct.unpack('!' + 'I' * nbufs, bmsg[4:4*(nbufs+1)])) + nbufs = struct.unpack("!i", bmsg[:4])[0] + offsets = list(struct.unpack("!" + "I" * nbufs, bmsg[4 : 4 * (nbufs + 1)])) offsets.append(None) bufs = [] for start, stop in zip(offsets[:-1], offsets[1:]): bufs.append(bmsg[start:stop]) - msg = json.loads(bufs[0].decode('utf8')) - msg['header'] = extract_dates(msg['header']) - msg['parent_header'] = extract_dates(msg['parent_header']) - msg['buffers'] = bufs[1:] + msg = json.loads(bufs[0].decode("utf8")) + msg["header"] = extract_dates(msg["header"]) + msg["parent_header"] = extract_dates(msg["parent_header"]) + msg["buffers"] = bufs[1:] return msg + # ping interval for keeping websockets alive (30 seconds) WS_PING_INTERVAL = 30000 class WebSocketMixin(object): """Mixin for common websocket options""" + ping_callback = None last_ping = 0 last_pong = 0 @@ -94,7 +95,7 @@ def ping_interval(self): Set ws_ping_interval = 0 to disable pings. """ - return self.settings.get('ws_ping_interval', WS_PING_INTERVAL) + return self.settings.get("ws_ping_interval", WS_PING_INTERVAL) @property def ping_timeout(self): @@ -102,9 +103,7 @@ def ping_timeout(self): close the websocket connection (VPNs, etc. can fail to cleanly close ws connections). Default is max of 3 pings or 30 seconds. """ - return self.settings.get('ws_ping_timeout', - max(3 * self.ping_interval, WS_PING_INTERVAL) - ) + return self.settings.get("ws_ping_timeout", max(3 * self.ping_interval, WS_PING_INTERVAL)) def check_origin(self, origin=None): """Check Origin == Host or Access-Control-Allow-Origin. @@ -112,8 +111,9 @@ def check_origin(self, origin=None): Tornado >= 4 calls this method automatically, raising 403 if it returns False. 
""" - if self.allow_origin == '*' or ( - hasattr(self, 'skip_check_origin') and self.skip_check_origin()): + if self.allow_origin == "*" or ( + hasattr(self, "skip_check_origin") and self.skip_check_origin() + ): return True host = self.request.headers.get("Host") @@ -140,8 +140,10 @@ def check_origin(self, origin=None): # No CORS headers deny the request allow = False if not allow: - self.log.warning("Blocking Cross Origin WebSocket Attempt. Origin: %s, Host: %s", - origin, host, + self.log.warning( + "Blocking Cross Origin WebSocket Attempt. Origin: %s, Host: %s", + origin, + host, ) return allow @@ -158,7 +160,8 @@ def open(self, *args, **kwargs): self.last_ping = loop.time() # Remember time of last ping self.last_pong = self.last_ping self.ping_callback = ioloop.PeriodicCallback( - self.send_ping, self.ping_interval, + self.send_ping, + self.ping_interval, ) self.ping_callback.start() return super(WebSocketMixin, self).open(*args, **kwargs) @@ -174,12 +177,12 @@ def send_ping(self): now = ioloop.IOLoop.current().time() since_last_pong = 1e3 * (now - self.last_pong) since_last_ping = 1e3 * (now - self.last_ping) - if since_last_ping < 2*self.ping_interval and since_last_pong > self.ping_timeout: + if since_last_ping < 2 * self.ping_interval and since_last_pong > self.ping_timeout: self.log.warning("WebSocket ping timeout after %i ms.", since_last_pong) self.close() return - self.ping(b'') + self.ping(b"") self.last_ping = now def on_pong(self, data): @@ -188,8 +191,9 @@ def on_pong(self, data): class ZMQStreamHandler(WebSocketMixin, WebSocketHandler): - if tornado.version_info < (4,1): + if tornado.version_info < (4, 1): """Backport send_error from tornado 4.1 to 4.0""" + def send_error(self, *args, **kwargs): if self.stream is None: super(WebSocketHandler, self).send_error(*args, **kwargs) @@ -200,7 +204,6 @@ def send_error(self, *args, **kwargs): # we can close the connection more gracefully. self.stream.close() - def _reserialize_reply(self, msg_or_list, channel=None): """Reserialize a reply message using JSON. 
@@ -219,8 +222,8 @@ def _reserialize_reply(self, msg_or_list, channel=None): idents, msg_list = self.session.feed_identities(msg_or_list) msg = self.session.deserialize(msg_list) if channel: - msg['channel'] = channel - if msg['buffers']: + msg["channel"] = channel + if msg["buffers"]: buf = serialize_binary_message(msg) return buf else: @@ -234,7 +237,7 @@ def _on_zmq_reply(self, stream, msg_list): self.log.warning("zmq message arrived on closed channel") self.close() return - channel = getattr(stream, 'channel', None) + channel = getattr(stream, "channel", None) try: msg = self._reserialize_reply(msg_list, channel=channel) except Exception: @@ -244,7 +247,6 @@ def _on_zmq_reply(self, stream, msg_list): class AuthenticatedZMQStreamHandler(ZMQStreamHandler, JupyterHandler): - def set_default_headers(self): """Undo the set_default_headers in JupyterHandler @@ -263,8 +265,8 @@ def pre_get(self): self.log.warning("Couldn't authenticate WebSocket connection") raise web.HTTPError(403) - if self.get_argument('session_id', False): - self.session.session = cast_unicode(self.get_argument('session_id')) + if self.get_argument("session_id", False): + self.session.session = cast_unicode(self.get_argument("session_id")) else: self.log.warning("No session ID specified") @@ -281,4 +283,4 @@ def initialize(self): self.session = Session(config=self.config) def get_compression_options(self): - return self.settings.get('websocket_compression_options', None) + return self.settings.get("websocket_compression_options", None) diff --git a/jupyter_server/config_manager.py b/jupyter_server/config_manager.py index 53c9852a90..94d613fedb 100644 --- a/jupyter_server/config_manager.py +++ b/jupyter_server/config_manager.py @@ -1,19 +1,18 @@ # coding: utf-8 """Manager to read and modify config data in JSON files.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - +import copy import errno import glob import io import json import os -import copy from six import PY3 from traitlets.config import LoggingConfigurable -from traitlets.traitlets import Unicode, Bool +from traitlets.traitlets import Bool +from traitlets.traitlets import Unicode def recursive_update(target, new): @@ -58,7 +57,7 @@ class BaseJSONConfigManager(LoggingConfigurable): default values in a {section_name}.d directory. """ - config_dir = Unicode('.') + config_dir = Unicode(".") read_directory = Bool(True) def ensure_config_dir_exists(self): @@ -71,11 +70,11 @@ def ensure_config_dir_exists(self): def file_name(self, section_name): """Returns the json filename for the section_name: {config_dir}/{section_name}.json""" - return os.path.join(self.config_dir, section_name+'.json') + return os.path.join(self.config_dir, section_name + ".json") def directory(self, section_name): """Returns the directory name for the section name: {config_dir}/{section_name}.d""" - return os.path.join(self.config_dir, section_name+'.d') + return os.path.join(self.config_dir, section_name + ".d") def get(self, section_name, include_root=True): """Retrieve the config data for the specified section. @@ -88,24 +87,25 @@ def get(self, section_name, include_root=True): """ paths = [self.file_name(section_name)] if include_root else [] if self.read_directory: - pattern = os.path.join(self.directory(section_name), '*.json') + pattern = os.path.join(self.directory(section_name), "*.json") # These json files should be processed first so that the # {section_name}.json take precedence. 
# The idea behind this is that installing a Python package may # put a json file somewhere in the a .d directory, while the # .json file is probably a user configuration. paths = sorted(glob.glob(pattern)) + paths - self.log.debug('Paths used for configuration of %s: \n\t%s', section_name, '\n\t'.join(paths)) + self.log.debug( + "Paths used for configuration of %s: \n\t%s", section_name, "\n\t".join(paths) + ) data = {} for path in paths: if os.path.isfile(path): - with io.open(path, encoding='utf-8') as f: + with io.open(path, encoding="utf-8") as f: recursive_update(data, json.load(f)) return data def set(self, section_name, data): - """Store the given config data. - """ + """Store the given config data.""" filename = self.file_name(section_name) self.ensure_config_dir_exists() @@ -119,9 +119,9 @@ def set(self, section_name, data): # in order to avoid writing half-finished corrupted data to disk. json_content = json.dumps(data, indent=2) if PY3: - f = io.open(filename, 'w', encoding='utf-8') + f = io.open(filename, "w", encoding="utf-8") else: - f = open(filename, 'wb') + f = open(filename, "wb") with f: f.write(json_content) diff --git a/jupyter_server/extension/application.py b/jupyter_server/extension/application.py index 73b633ec80..a0cd5c3551 100644 --- a/jupyter_server/extension/application.py +++ b/jupyter_server/extension/application.py @@ -1,27 +1,26 @@ -import sys -import re import logging +import re +import sys -from jinja2 import Environment, FileSystemLoader - -from traitlets.config import Config -from traitlets import ( - HasTraits, - Unicode, - List, - Dict, - Bool, - default -) +from jinja2 import Environment +from jinja2 import FileSystemLoader +from jupyter_core.application import JupyterApp +from jupyter_core.application import NoStart from tornado.log import LogFormatter from tornado.web import RedirectHandler +from traitlets import Bool +from traitlets import default +from traitlets import Dict +from traitlets import HasTraits +from traitlets import List +from traitlets import Unicode +from traitlets.config import Config -from jupyter_core.application import JupyterApp, NoStart - +from .handler import ExtensionHandlerMixin from jupyter_server.serverapp import ServerApp from jupyter_server.transutils import _i18n -from jupyter_server.utils import url_path_join, is_namespace_package -from .handler import ExtensionHandlerMixin +from jupyter_server.utils import is_namespace_package +from jupyter_server.utils import url_path_join # ----------------------------------------------------------------------------- # Util functions and classes. @@ -29,8 +28,7 @@ def _preparse_for_subcommand(Application, argv): - """Preparse command line to look for subcommands. - """ + """Preparse command line to look for subcommands.""" # Read in arguments from command line. if len(argv) == 0: return @@ -39,7 +37,7 @@ def _preparse_for_subcommand(Application, argv): if Application.subcommands and len(argv) > 0: # we have subcommands, and one may have been specified subc, subargv = argv[0], argv[1:] - if re.match(r'^\w(\-?\w)*$', subc) and subc in Application.subcommands: + if re.match(r"^\w(\-?\w)*$", subc) and subc in Application.subcommands: # it's a subcommand, and *not* a flag or class parameter app = Application() app.initialize_subcommand(subc, subargv) @@ -60,24 +58,24 @@ def _preparse_for_stopping_flags(Application, argv): # version), we want to only search the arguments up to the first # occurrence of '--', which we're calling interpreted_argv. 
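A quick illustration of that interpreted_argv slice, outside the diff: everything after a literal "--" is left for the application itself, and only the slice before it is scanned for stopping flags.

def interpreted(argv):
    # arguments after the first '--' are never treated as stopping flags
    try:
        return argv[: argv.index("--")]
    except ValueError:
        return argv

assert interpreted(["--version", "--", "-h"]) == ["--version"]
assert interpreted(["-h"]) == ["-h"]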
     try:
-        interpreted_argv = argv[:argv.index('--')]
+        interpreted_argv = argv[: argv.index("--")]
     except ValueError:
         interpreted_argv = argv

     # Catch any help calls.
-    if any(x in interpreted_argv for x in ('-h', '--help-all', '--help')):
+    if any(x in interpreted_argv for x in ("-h", "--help-all", "--help")):
         app = Application()
-        app.print_help('--help-all' in interpreted_argv)
+        app.print_help("--help-all" in interpreted_argv)
         app.exit(0)

     # Catch version commands
-    if '--version' in interpreted_argv or '-V' in interpreted_argv:
+    if "--version" in interpreted_argv or "-V" in interpreted_argv:
         app = Application()
         app.print_version()
         app.exit(0)

     # Catch generate-config commands.
-    if '--generate-config' in interpreted_argv:
+    if "--generate-config" in interpreted_argv:
         app = Application()
         app.write_default_config()
         app.exit(0)

@@ -87,8 +85,10 @@ class ExtensionAppJinjaMixin(HasTraits):
     """Use Jinja templates for HTML templates on top of an ExtensionApp."""

     jinja2_options = Dict(
-        help=_i18n("""Options to pass to the jinja2 environment for this
-        """)
+        help=_i18n(
+            """Options to pass to the jinja2 environment for this
+        """
+        )
     ).tag(config=True)

     def _prepare_templates(self):
@@ -96,25 +96,19 @@ def _prepare_templates(self):
         self.initialize_templates()
         # Add templates to web app settings if extension has templates.
         if len(self.template_paths) > 0:
-            self.settings.update({
-                "{}_template_paths".format(self.name): self.template_paths
-            })
+            self.settings.update({"{}_template_paths".format(self.name): self.template_paths})

         # Create a jinja environment for logging html templates.
         self.jinja2_env = Environment(
             loader=FileSystemLoader(self.template_paths),
-            extensions=['jinja2.ext.i18n'],
+            extensions=["jinja2.ext.i18n"],
             autoescape=True,
             **self.jinja2_options
         )

-        # Add the jinja2 environment for this extension to the tornado settings.
-        self.settings.update(
-            {
-                "{}_jinja2_env".format(self.name): self.jinja2_env
-            }
-        )
+        self.settings.update({"{}_jinja2_env".format(self.name): self.jinja2_env})
+

 # -----------------------------------------------------------------------------
 # ExtensionApp
 # -----------------------------------------------------------------------------
@@ -124,6 +118,7 @@ def _prepare_templates(self):

 class JupyterServerExtensionException(Exception):
     """Exception class for raising for Server extensions errors."""
+

 # -----------------------------------------------------------------------------
 # ExtensionApp
 # -----------------------------------------------------------------------------
@@ -140,6 +135,7 @@ class ExtensionApp(JupyterApp):
     class method. This method can be set as an entry_point in
     the extensions setup.py
     """
+
     # Subclasses should override this trait. Tells the server if
     # this extension allows other extensions to be loaded
     # side-by-side when launched directly.
@@ -162,7 +158,7 @@ class method. This method can be set as an entry_point in
     """
     ).tag(config=True)

-    @default('open_browser')
+    @default("open_browser")
     def _default_open_browser(self):
         return self.serverapp.config["ServerApp"].get("open_browser", True)

@@ -174,10 +170,10 @@ def _default_open_browser(self):

     @classmethod
     def get_extension_package(cls):
-        parts = cls.__module__.split('.')
+        parts = cls.__module__.split(".")
         if is_namespace_package(parts[0]):
             # in this case the package name is `<namespace>.<package>`.
- return '.'.join(parts[0:2]) + return ".".join(parts[0:2]) return parts[0] @classmethod @@ -189,11 +185,11 @@ def get_extension_point(cls): default_url = Unicode().tag(config=True) - @default('default_url') + @default("default_url") def _default_url(self): return self.extension_url - file_url_prefix = Unicode('notebooks') + file_url_prefix = Unicode("notebooks") # Is this linked to a serverapp yet? _linked = Bool(False) @@ -208,11 +204,11 @@ def _default_url(self): _log_formatter_cls = LogFormatter - @default('log_level') + @default("log_level") def _default_log_level(self): return logging.INFO - @default('log_format') + @default("log_format") def _default_log_format(self): """override default log format to include date & time""" return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" @@ -221,40 +217,38 @@ def _default_log_format(self): help="""Url where the static assets for the extension are served.""" ).tag(config=True) - @default('static_url_prefix') + @default("static_url_prefix") def _default_static_url_prefix(self): - static_url = "static/{name}/".format( - name=self.name - ) + static_url = "static/{name}/".format(name=self.name) return url_path_join(self.serverapp.base_url, static_url) - static_paths = List(Unicode(), + static_paths = List( + Unicode(), help="""paths to search for serving static files. This allows adding javascript/css to be available from the notebook server machine, or overriding individual files in the IPython - """ + """, ).tag(config=True) - template_paths = List(Unicode(), - help=_i18n("""Paths to search for serving jinja templates. + template_paths = List( + Unicode(), + help=_i18n( + """Paths to search for serving jinja templates. - Can be used to override templates from notebook.templates.""") + Can be used to override templates from notebook.templates.""" + ), ).tag(config=True) - settings = Dict( - help=_i18n("""Settings that will passed to the server.""") - ).tag(config=True) + settings = Dict(help=_i18n("""Settings that will passed to the server.""")).tag(config=True) - handlers = List( - help=_i18n("""Handlers appended to the server.""") - ).tag(config=True) + handlers = List(help=_i18n("""Handlers appended to the server.""")).tag(config=True) def _config_file_name_default(self): """The default config file name.""" if not self.name: - return '' - return 'jupyter_{}_config'.format(self.name.replace('-', '_')) + return "" + return "jupyter_{}_config".format(self.name.replace("-", "_")) def initialize_settings(self): """Override this method to add handling of settings.""" @@ -274,7 +268,7 @@ def _prepare_config(self): """ traits = self.class_own_traits().keys() self.extension_config = Config({t: getattr(self, t) for t in traits}) - self.settings['{}_config'.format(self.name)] = self.extension_config + self.settings["{}_config".format(self.name)] = self.extension_config def _prepare_settings(self): # Make webapp settings accessible to initialize_settings method @@ -282,10 +276,12 @@ def _prepare_settings(self): self.settings.update(**webapp.settings) # Add static and template paths to settings. - self.settings.update({ - "{}_static_paths".format(self.name): self.static_paths, - "{}".format(self.name): self, - }) + self.settings.update( + { + "{}_static_paths".format(self.name): self.static_paths, + "{}".format(self.name): self, + } + ) # Get setting defined by subclass using initialize_settings method. 
self.initialize_settings() @@ -303,13 +299,13 @@ def _prepare_handlers(self): new_handlers = [] for handler_items in self.handlers: # Build url pattern including base_url - pattern = url_path_join(webapp.settings['base_url'], handler_items[0]) + pattern = url_path_join(webapp.settings["base_url"], handler_items[0]) handler = handler_items[1] # Get handler kwargs, if given kwargs = {} if issubclass(handler, ExtensionHandlerMixin): - kwargs['name'] = self.name + kwargs["name"] = self.name try: kwargs.update(handler_items[2]) @@ -327,19 +323,17 @@ def _prepare_handlers(self): # Construct handler. handler = ( static_url, - webapp.settings['static_handler_class'], - {'path': self.static_paths} + webapp.settings["static_handler_class"], + {"path": self.static_paths}, ) new_handlers.append(handler) - webapp.add_handlers('.*$', new_handlers) + webapp.add_handlers(".*$", new_handlers) def _prepare_templates(self): # Add templates to web app settings if extension has templates. if len(self.template_paths) > 0: - self.settings.update({ - "{}_template_paths".format(self.name): self.template_paths - }) + self.settings.update({"{}_template_paths".format(self.name): self.template_paths}) self.initialize_templates() def _jupyter_server_config(self): @@ -347,7 +341,7 @@ def _jupyter_server_config(self): "ServerApp": { "default_url": self.default_url, "open_browser": self.open_browser, - "file_url_prefix": self.file_url_prefix + "file_url_prefix": self.file_url_prefix, } } base_config["ServerApp"].update(self.serverapp_config) @@ -424,8 +418,7 @@ async def stop_extension(self): """Cleanup any resources managed by this extension.""" def stop(self): - """Stop the underlying Jupyter server. - """ + """Stop the underlying Jupyter server.""" self.serverapp.stop() self.serverapp.clear_instance() @@ -447,32 +440,81 @@ def _load_jupyter_server_extension(cls, serverapp): @classmethod def load_classic_server_extension(cls, serverapp): - """Enables extension to be loaded as classic Notebook (jupyter/notebook) extension. 
- """ + """Enables extension to be loaded as classic Notebook (jupyter/notebook) extension.""" extension = cls() extension.serverapp = serverapp extension.load_config_file() extension.update_config(serverapp.config) extension.parse_command_line(serverapp.extra_args) # Add redirects to get favicons from old locations in the classic notebook server - extension.handlers.extend([ - (r"/static/favicons/favicon.ico", RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon.ico")}), - (r"/static/favicons/favicon-busy-1.ico", RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-busy-1.ico")}), - (r"/static/favicons/favicon-busy-2.ico", RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-busy-2.ico")}), - (r"/static/favicons/favicon-busy-3.ico", RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-busy-3.ico")}), - (r"/static/favicons/favicon-file.ico", RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-file.ico")}), - (r"/static/favicons/favicon-notebook.ico", RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-notebook.ico")}), - (r"/static/favicons/favicon-terminal.ico", RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-terminal.ico")}), - (r"/static/logo/logo.png", RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/logo.png")}), - ]) + extension.handlers.extend( + [ + ( + r"/static/favicons/favicon.ico", + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/favicon.ico")}, + ), + ( + r"/static/favicons/favicon-busy-1.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-busy-1.ico" + ) + }, + ), + ( + r"/static/favicons/favicon-busy-2.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-busy-2.ico" + ) + }, + ), + ( + r"/static/favicons/favicon-busy-3.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-busy-3.ico" + ) + }, + ), + ( + r"/static/favicons/favicon-file.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-file.ico" + ) + }, + ), + ( + r"/static/favicons/favicon-notebook.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-notebook.ico" + ) + }, + ), + ( + r"/static/favicons/favicon-terminal.ico", + RedirectHandler, + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-terminal.ico" + ) + }, + ), + ( + r"/static/logo/logo.png", + RedirectHandler, + {"url": url_path_join(serverapp.base_url, "static/base/images/logo.png")}, + ), + ] + ) extension.initialize() @classmethod @@ -486,12 +528,11 @@ def initialize_server(cls, argv=[], load_other_extensions=True, **kwargs): """ jpserver_extensions = {cls.get_extension_package(): True} find_extensions = cls.load_other_extensions - if 'jpserver_extensions' in cls.serverapp_config: - jpserver_extensions.update(cls.serverapp_config['jpserver_extensions']) - cls.serverapp_config['jpserver_extensions'] = jpserver_extensions + if "jpserver_extensions" in cls.serverapp_config: + jpserver_extensions.update(cls.serverapp_config["jpserver_extensions"]) + cls.serverapp_config["jpserver_extensions"] = jpserver_extensions find_extensions = False - serverapp = ServerApp.instance( 
- jpserver_extensions=jpserver_extensions, **kwargs) + serverapp = ServerApp.instance(jpserver_extensions=jpserver_extensions, **kwargs) serverapp.aliases.update(cls.aliases) serverapp.initialize( argv=argv, diff --git a/jupyter_server/extension/config.py b/jupyter_server/extension/config.py index 3091847abc..8fb6ceca21 100644 --- a/jupyter_server/extension/config.py +++ b/jupyter_server/extension/config.py @@ -1,4 +1,3 @@ - from jupyter_server.services.config.manager import ConfigManager @@ -10,25 +9,14 @@ class ExtensionConfigManager(ConfigManager): found in a `config.d` folder. It is assumed that all configuration files in this directory are JSON files. """ - def get_jpserver_extensions( - self, - section_name=DEFAULT_SECTION_NAME - ): + + def get_jpserver_extensions(self, section_name=DEFAULT_SECTION_NAME): """Return the jpserver_extensions field from all config files found.""" data = self.get(section_name) - return ( - data - .get("ServerApp", {}) - .get("jpserver_extensions", {}) - ) - - def enabled( - self, - name, - section_name=DEFAULT_SECTION_NAME, - include_root=True - ): + return data.get("ServerApp", {}).get("jpserver_extensions", {}) + + def enabled(self, name, section_name=DEFAULT_SECTION_NAME, include_root=True): """Is the extension enabled?""" extensions = self.get_jpserver_extensions(section_name) try: @@ -37,21 +25,9 @@ def enabled( return False def enable(self, name): - data = { - "ServerApp": { - "jpserver_extensions": { - name: True - } - } - } + data = {"ServerApp": {"jpserver_extensions": {name: True}}} self.update(name, data) def disable(self, name): - data = { - "ServerApp": { - "jpserver_extensions": { - name: False - } - } - } + data = {"ServerApp": {"jpserver_extensions": {name: False}}} self.update(name, data) diff --git a/jupyter_server/extension/handler.py b/jupyter_server/extension/handler.py index db615db911..4b7444001b 100644 --- a/jupyter_server/extension/handler.py +++ b/jupyter_server/extension/handler.py @@ -1,15 +1,17 @@ -from jupyter_server.base.handlers import FileFindHandler from jinja2.exceptions import TemplateNotFound +from jupyter_server.base.handlers import FileFindHandler + class ExtensionHandlerJinjaMixin: """Mixin class for ExtensionApp handlers that use jinja templating for template rendering. """ + def get_template(self, name): """Return the jinja template object for a given name""" try: - env = '{}_jinja2_env'.format(self.name) + env = "{}_jinja2_env".format(self.name) return self.settings[env].get_template(name) except TemplateNotFound: return super().get_template(name) @@ -25,6 +27,7 @@ class ExtensionHandlerMixin: their own namespace and avoid intercepting requests for other extensions. """ + def initialize(self, name): self.name = name @@ -39,7 +42,7 @@ def serverapp(self): @property def log(self): - if not hasattr(self, 'name'): + if not hasattr(self, "name"): return super().log # Attempt to pull the ExtensionApp's log, otherwise fall back to ServerApp. try: @@ -57,7 +60,7 @@ def server_config(self): @property def base_url(self): - return self.settings.get('base_url', '/') + return self.settings.get("base_url", "/") @property def static_url_prefix(self): @@ -65,7 +68,7 @@ def static_url_prefix(self): @property def static_path(self): - return self.settings['{}_static_paths'.format(self.name)] + return self.settings["{}_static_paths".format(self.name)] def static_url(self, path, include_host=None, **kwargs): """Returns a static URL for the given relative static file path. 
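# A minimal usage sketch (hypothetical extension name "myext" and handler
# HelloHandler, not part of this diff): handlers registered through an
# ExtensionApp pick up the mixin's namespaced settings, e.g.
#
#     from jupyter_server.base.handlers import JupyterHandler
#     from jupyter_server.extension.handler import ExtensionHandlerMixin
#
#     class HelloHandler(ExtensionHandlerMixin, JupyterHandler):
#         def get(self):
#             # self.name is set by initialize(name); static_url() resolves
#             # against the "myext_static_paths" setting described above.
#             self.finish({"hello": self.name, "logo": self.static_url("logo.png")})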
@@ -96,9 +99,7 @@ def static_url(self, path, include_host=None, **kwargs): else: raise e - get_url = self.settings.get( - "static_handler_class", FileFindHandler - ).make_static_url + get_url = self.settings.get("static_handler_class", FileFindHandler).make_static_url if include_host is None: include_host = getattr(self, "include_host", False) @@ -110,9 +111,6 @@ def static_url(self, path, include_host=None, **kwargs): # Hijack settings dict to send extension templates to extension # static directory. - settings = { - "static_path": self.static_path, - "static_url_prefix": self.static_url_prefix - } + settings = {"static_path": self.static_path, "static_url_prefix": self.static_url_prefix} return base + get_url(settings, path, **kwargs) diff --git a/jupyter_server/extension/manager.py b/jupyter_server/extension/manager.py index f9dcc2e8b6..a3a439383d 100644 --- a/jupyter_server/extension/manager.py +++ b/jupyter_server/extension/manager.py @@ -3,49 +3,43 @@ import traceback from tornado.gen import multi - +from traitlets import Any +from traitlets import Bool +from traitlets import default +from traitlets import Dict +from traitlets import HasTraits +from traitlets import Instance +from traitlets import observe +from traitlets import Unicode +from traitlets import validate as validate_trait from traitlets.config import LoggingConfigurable -from traitlets import ( - HasTraits, - Dict, - Unicode, - Bool, - Any, - Instance, - default, - observe, - validate as validate_trait, -) - from .config import ExtensionConfigManager -from .utils import ( - ExtensionMetadataError, - ExtensionModuleNotFound, - get_loader, - get_metadata, -) +from .utils import ExtensionMetadataError +from .utils import ExtensionModuleNotFound +from .utils import get_loader +from .utils import get_metadata class ExtensionPoint(HasTraits): """A simple API for connecting to a Jupyter Server extension point defined by metadata and importable from a Python package. """ + _linked = Bool(False) _app = Any(None, allow_none=True) metadata = Dict() - @validate_trait('metadata') + @validate_trait("metadata") def _valid_metadata(self, proposed): - metadata = proposed['value'] + metadata = proposed["value"] # Verify that the metadata has a "name" key. try: - self._module_name = metadata['module'] + self._module_name = metadata["module"] except KeyError: raise ExtensionMetadataError( - "There is no 'module' key in the extension's " - "metadata packet." + "There is no 'module' key in the extension's " "metadata packet." ) try: @@ -56,7 +50,7 @@ def _valid_metadata(self, proposed): "sure the extension is installed?".format(self._module_name) ) # If the metadata includes an ExtensionApp, create an instance. - if 'app' in metadata: + if "app" in metadata: self._app = metadata["app"]() return metadata @@ -106,8 +100,7 @@ def name(self): @property def module(self): - """The imported module (using importlib.import_module) - """ + """The imported module (using importlib.import_module)""" return self._module def _get_linker(self): @@ -117,9 +110,9 @@ def _get_linker(self): linker = getattr( self.module, # Search for a _link_jupyter_extension - '_link_jupyter_server_extension', + "_link_jupyter_server_extension", # Otherwise return a dummy function. 
- lambda serverapp: None + lambda serverapp: None, ) return linker @@ -170,6 +163,7 @@ class ExtensionPackage(HasTraits): ext_name = "my_extensions" extpkg = ExtensionPackage(name=ext_name) """ + name = Unicode(help="Name of the an importable Python package.") enabled = Bool(False).tag(config=True) @@ -182,7 +176,7 @@ def __init__(self, *args, **kwargs): @validate_trait("name") def _validate_name(self, proposed): - name = proposed['value'] + name = proposed["value"] self._extension_points = {} try: self._module, self._metadata = get_metadata(name) @@ -239,10 +233,7 @@ def link_all_points(self, serverapp): self.link_point(point_name, serverapp) def load_all_points(self, serverapp): - return [ - self.load_point(point_name, serverapp) - for point_name in self.extension_points - ] + return [self.load_point(point_name, serverapp) for point_name in self.extension_points] class ExtensionManager(LoggingConfigurable): @@ -296,21 +287,15 @@ def sorted_extensions(self): @property def extension_apps(self): - """Return mapping of extension names and sets of ExtensionApp objects. - """ + """Return mapping of extension names and sets of ExtensionApp objects.""" return { - name: { - point.app - for point in extension.extension_points.values() - if point.app - } + name: {point.app for point in extension.extension_points.values() if point.app} for name, extension in self.extensions.items() } @property def extension_points(self): - """Return mapping of extension point names and ExtensionPoint objects. - """ + """Return mapping of extension point names and ExtensionPoint objects.""" return { name: point for value in self.extensions.values() @@ -365,7 +350,11 @@ def load_extension(self, name, serverapp): points = extension.load_all_points(serverapp) except Exception as e: self.log.debug("".join(traceback.format_exception(*sys.exc_info()))) - self.log.warning("{name} | extension failed loading with message: {error}".format(name=name,error=str(e))) + self.log.warning( + "{name} | extension failed loading with message: {error}".format( + name=name, error=str(e) + ) + ) else: self.log.info("{name} | extension was successfully loaded.".format(name=name)) @@ -396,7 +385,9 @@ def load_all_extensions(self, serverapp): async def stop_all_extensions(self, serverapp): """Call the shutdown hooks in all extensions.""" - await multi([ - self.stop_extension(name, apps) - for name, apps in sorted(dict(self.extension_apps).items()) - ]) + await multi( + [ + self.stop_extension(name, apps) + for name, apps in sorted(dict(self.extension_apps).items()) + ] + ) diff --git a/jupyter_server/extension/serverextension.py b/jupyter_server/extension/serverextension.py index 74b8b28f0a..202d75e127 100644 --- a/jupyter_server/extension/serverextension.py +++ b/jupyter_server/extension/serverextension.py @@ -1,23 +1,21 @@ # coding: utf-8 """Utilities for installing extensions""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
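# Scope note (summary for orientation; the helper body is not shown in this
# diff): _get_config_dir() below maps the user/system/sys-prefix flags to a
# jupyter config location -- the per-user jupyter_config_dir(), the
# environment's ENV_CONFIG_PATH[0] for --sys-prefix, or SYSTEM_CONFIG_PATH[0]
# otherwise.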
- import os import sys + +from jupyter_core.application import JupyterApp +from jupyter_core.paths import ENV_CONFIG_PATH +from jupyter_core.paths import jupyter_config_dir +from jupyter_core.paths import SYSTEM_CONFIG_PATH from tornado.log import LogFormatter from traitlets import Bool -from jupyter_core.application import JupyterApp -from jupyter_core.paths import ( - jupyter_config_dir, - ENV_CONFIG_PATH, - SYSTEM_CONFIG_PATH -) from jupyter_server._version import __version__ from jupyter_server.extension.config import ExtensionConfigManager -from jupyter_server.extension.manager import ExtensionManager, ExtensionPackage +from jupyter_server.extension.manager import ExtensionManager +from jupyter_server.extension.manager import ExtensionPackage def _get_config_dir(user=False, sys_prefix=False): @@ -71,34 +69,49 @@ def _get_extmanager_for_context(write_dir="jupyter_server_config.d", user=False, class ArgumentConflict(ValueError): pass + _base_flags = {} _base_flags.update(JupyterApp.flags) _base_flags.pop("y", None) _base_flags.pop("generate-config", None) -_base_flags.update({ - "user" : ({ - "BaseExtensionApp" : { - "user" : True, - }}, "Apply the operation only for the given user" - ), - "system" : ({ - "BaseExtensionApp" : { - "user" : False, - "sys_prefix": False, - }}, "Apply the operation system-wide" - ), - "sys-prefix" : ({ - "BaseExtensionApp" : { - "sys_prefix" : True, - }}, "Use sys.prefix as the prefix for installing extensions (for environments, packaging)" - ), - "py" : ({ - "BaseExtensionApp" : { - "python" : True, - }}, "Install from a Python package" - ) -}) -_base_flags['python'] = _base_flags['py'] +_base_flags.update( + { + "user": ( + { + "BaseExtensionApp": { + "user": True, + } + }, + "Apply the operation only for the given user", + ), + "system": ( + { + "BaseExtensionApp": { + "user": False, + "sys_prefix": False, + } + }, + "Apply the operation system-wide", + ), + "sys-prefix": ( + { + "BaseExtensionApp": { + "sys_prefix": True, + } + }, + "Use sys.prefix as the prefix for installing extensions (for environments, packaging)", + ), + "py": ( + { + "BaseExtensionApp": { + "python": True, + } + }, + "Install from a Python package", + ), + } +) +_base_flags["python"] = _base_flags["py"] _base_aliases = {} _base_aliases.update(JupyterApp.aliases) @@ -106,6 +119,7 @@ class ArgumentConflict(ValueError): class BaseExtensionApp(JupyterApp): """Base extension installer app""" + _log_formatter_cls = LogFormatter flags = _base_flags aliases = _base_aliases @@ -126,10 +140,10 @@ def config_dir(self): # Constants for pretty print extension listing function. 
# Window doesn't support coloring in the commandline -GREEN_ENABLED = '\033[32menabled\033[0m' if os.name != 'nt' else 'enabled' -RED_DISABLED = '\033[31mdisabled\033[0m' if os.name != 'nt' else 'disabled' -GREEN_OK = '\033[32mOK\033[0m' if os.name != 'nt' else 'ok' -RED_X = '\033[31m X\033[0m' if os.name != 'nt' else ' X' +GREEN_ENABLED = "\033[32menabled\033[0m" if os.name != "nt" else "enabled" +RED_DISABLED = "\033[31mdisabled\033[0m" if os.name != "nt" else "disabled" +GREEN_OK = "\033[32mOK\033[0m" if os.name != "nt" else "ok" +RED_X = "\033[31m X\033[0m" if os.name != "nt" else " X" # ------------------------------------------------------------------------------ # Public API @@ -137,11 +151,7 @@ def config_dir(self): def toggle_server_extension_python( - import_name, - enabled=None, - parent=None, - user=False, - sys_prefix=True + import_name, enabled=None, parent=None, user=False, sys_prefix=True ): """Toggle the boolean setting for a given server extension in a Jupyter config file. @@ -150,13 +160,14 @@ def toggle_server_extension_python( config_dir = _get_config_dir(user=user, sys_prefix=sys_prefix) manager = ExtensionConfigManager( read_config_path=[config_dir], - write_config_dir=os.path.join(config_dir, "jupyter_server_config.d") + write_config_dir=os.path.join(config_dir, "jupyter_server_config.d"), ) if enabled: manager.enable(import_name) else: manager.disable(import_name) + # ---------------------------------------------------------------------- # Applications # ---------------------------------------------------------------------- @@ -165,42 +176,57 @@ def toggle_server_extension_python( flags.update(BaseExtensionApp.flags) flags.pop("y", None) flags.pop("generate-config", None) -flags.update({ - "user" : ({ - "ToggleServerExtensionApp" : { - "user" : True, - }}, "Perform the operation for the current user" - ), - "system" : ({ - "ToggleServerExtensionApp" : { - "user" : False, - "sys_prefix": False, - }}, "Perform the operation system-wide" - ), - "sys-prefix" : ({ - "ToggleServerExtensionApp" : { - "sys_prefix" : True, - }}, "Use sys.prefix as the prefix for installing server extensions" - ), - "py" : ({ - "ToggleServerExtensionApp" : { - "python" : True, - }}, "Install from a Python package" - ), -}) -flags['python'] = flags['py'] +flags.update( + { + "user": ( + { + "ToggleServerExtensionApp": { + "user": True, + } + }, + "Perform the operation for the current user", + ), + "system": ( + { + "ToggleServerExtensionApp": { + "user": False, + "sys_prefix": False, + } + }, + "Perform the operation system-wide", + ), + "sys-prefix": ( + { + "ToggleServerExtensionApp": { + "sys_prefix": True, + } + }, + "Use sys.prefix as the prefix for installing server extensions", + ), + "py": ( + { + "ToggleServerExtensionApp": { + "python": True, + } + }, + "Install from a Python package", + ), + } +) +flags["python"] = flags["py"] class ToggleServerExtensionApp(BaseExtensionApp): """A base class for enabling/disabling extensions""" + name = "jupyter server extension enable/disable" description = "Enable/disable a server extension using frontend configuration files." flags = flags _toggle_value = Bool() - _toggle_pre_message = '' - _toggle_post_message = '' + _toggle_pre_message = "" + _toggle_post_message = "" def toggle_server_extension(self, import_name): """Change the status of a named server extension. @@ -216,8 +242,7 @@ def toggle_server_extension(self, import_name): """ # Create an extension manager for this instance. 
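         # _get_extmanager_for_context() returns a (config_dir, ExtensionManager)
         # pair scoped by the user/sys_prefix flags. The equivalent CLI call would
         # be, e.g., `jupyter server extension enable --sys-prefix myext`, where
         # "myext" is a hypothetical installed package.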
config_dir, extension_manager = _get_extmanager_for_context( - user=self.user, - sys_prefix=self.sys_prefix + user=self.user, sys_prefix=self.sys_prefix ) try: self.log.info("{}: {}".format(self._toggle_pre_message.capitalize(), import_name)) @@ -245,13 +270,14 @@ def toggle_server_extension(self, import_name): def start(self): """Perform the App's actions as configured""" if not self.extra_args: - sys.exit('Please specify a server extension/package to enable or disable') + sys.exit("Please specify a server extension/package to enable or disable") for arg in self.extra_args: self.toggle_server_extension(arg) class EnableServerExtensionApp(ToggleServerExtensionApp): """An App that enables (and validates) Server Extensions""" + name = "jupyter server extension enable" description = """ Enable a server extension in configuration. @@ -266,6 +292,7 @@ class EnableServerExtensionApp(ToggleServerExtensionApp): class DisableServerExtensionApp(ToggleServerExtensionApp): """An App that disables Server Extensions""" + name = "jupyter server extension disable" description = """ Disable a server extension in configuration. @@ -280,6 +307,7 @@ class DisableServerExtensionApp(ToggleServerExtensionApp): class ListServerExtensionsApp(BaseExtensionApp): """An App that lists (and validates) Server Extensions""" + name = "jupyter server extension list" version = __version__ description = "List all server extensions known by the configuration system" @@ -292,7 +320,7 @@ def list_server_extensions(self): configurations = ( {"user": True, "sys_prefix": False}, {"user": False, "sys_prefix": True}, - {"user": False, "sys_prefix": False} + {"user": False, "sys_prefix": False}, ) for option in configurations: @@ -301,17 +329,13 @@ def list_server_extensions(self): for name, extension in ext_manager.extensions.items(): enabled = extension.enabled # Attempt to get extension metadata - self.log.info(u' {} {}'.format( - name, - GREEN_ENABLED if enabled else RED_DISABLED)) + self.log.info(u" {} {}".format(name, GREEN_ENABLED if enabled else RED_DISABLED)) try: self.log.info(" - Validating {}...".format(name)) if not extension.validate(): raise ValueError("validation failed") version = extension.version - self.log.info( - " {} {} {}".format(name, version, GREEN_OK) - ) + self.log.info(" {} {} {}".format(name, version, GREEN_OK)) except Exception as err: self.log.warn(" {} {}".format(RED_X, err)) # Add a blank line between paths. @@ -331,6 +355,7 @@ def start(self): class ServerExtensionApp(BaseExtensionApp): """Root level server extension app""" + name = "jupyter server extension" version = __version__ description = "Work with Jupyter server extensions" @@ -339,7 +364,7 @@ class ServerExtensionApp(BaseExtensionApp): subcommands = dict( enable=(EnableServerExtensionApp, "Enable a server extension"), disable=(DisableServerExtensionApp, "Disable a server extension"), - list=(ListServerExtensionsApp, "List server extensions") + list=(ListServerExtensionsApp, "List server extensions"), ) def start(self): @@ -355,5 +380,5 @@ def start(self): main = ServerExtensionApp.launch_instance -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/jupyter_server/extension/utils.py b/jupyter_server/extension/utils.py index 272f4b701a..afca32203c 100644 --- a/jupyter_server/extension/utils.py +++ b/jupyter_server/extension/utils.py @@ -26,14 +26,17 @@ def get_loader(obj, logger=None): underscore prefix. 
""" try: - func = getattr(obj, '_load_jupyter_server_extension') + func = getattr(obj, "_load_jupyter_server_extension") except AttributeError: - func = getattr(obj, 'load_jupyter_server_extension') - warnings.warn("A `_load_jupyter_server_extension` function was not " - "found in {name!s}. Instead, a `load_jupyter_server_extension` " - "function was found and will be used for now. This function " - "name will be deprecated in future releases " - "of Jupyter Server.".format(name=obj), DeprecationWarning) + func = getattr(obj, "load_jupyter_server_extension") + warnings.warn( + "A `_load_jupyter_server_extension` function was not " + "found in {name!s}. Instead, a `load_jupyter_server_extension` " + "function was found and will be used for now. This function " + "name will be deprecated in future releases " + "of Jupyter Server.".format(name=obj), + DeprecationWarning, + ) except Exception: raise ExtensionLoadingError("_load_jupyter_server_extension function was not found.") return func @@ -82,10 +85,7 @@ def get_metadata(package_name, logger=None): "for extension points in the extension pacakge's " "root.".format(name=package_name) ) - return module, [{ - "module": package_name, - "name": package_name - }] + return module, [{"module": package_name, "name": package_name}] def validate_extension(name): @@ -99,4 +99,5 @@ def validate_extension(name): If this works, nothing should happen. """ from .manager import ExtensionPackage + return ExtensionPackage(name=name) diff --git a/jupyter_server/files/handlers.py b/jupyter_server/files/handlers.py index 675af809e5..4190638817 100644 --- a/jupyter_server/files/handlers.py +++ b/jupyter_server/files/handlers.py @@ -1,15 +1,16 @@ """Serve files directly from the ContentsManager.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - -import mimetypes import json +import mimetypes from base64 import decodebytes + from tornado import web + from jupyter_server.base.handlers import JupyterHandler from jupyter_server.utils import ensure_async + class FilesHandler(JupyterHandler): """serve files via ContentsManager @@ -23,8 +24,7 @@ class FilesHandler(JupyterHandler): def content_security_policy(self): # In case we're serving HTML/SVG, confine any Javascript to a unique # origin so it can't interact with the notebook server. 
- return super(FilesHandler, self).content_security_policy + \ - "; sandbox allow-scripts" + return super(FilesHandler, self).content_security_policy + "; sandbox allow-scripts" @web.authenticated def head(self, path): @@ -42,40 +42,40 @@ async def get(self, path, include_body=True): self.log.info("Refusing to serve hidden file, via 404 Error") raise web.HTTPError(404) - path = path.strip('/') - if '/' in path: - _, name = path.rsplit('/', 1) + path = path.strip("/") + if "/" in path: + _, name = path.rsplit("/", 1) else: name = path - model = await ensure_async(cm.get(path, type='file', content=include_body)) + model = await ensure_async(cm.get(path, type="file", content=include_body)) if self.get_argument("download", False): self.set_attachment_header(name) # get mimetype from filename - if name.lower().endswith('.ipynb'): - self.set_header('Content-Type', 'application/x-ipynb+json') + if name.lower().endswith(".ipynb"): + self.set_header("Content-Type", "application/x-ipynb+json") else: cur_mime = mimetypes.guess_type(name)[0] - if cur_mime == 'text/plain': - self.set_header('Content-Type', 'text/plain; charset=UTF-8') + if cur_mime == "text/plain": + self.set_header("Content-Type", "text/plain; charset=UTF-8") elif cur_mime is not None: - self.set_header('Content-Type', cur_mime) + self.set_header("Content-Type", cur_mime) else: - if model['format'] == 'base64': - self.set_header('Content-Type', 'application/octet-stream') + if model["format"] == "base64": + self.set_header("Content-Type", "application/octet-stream") else: - self.set_header('Content-Type', 'text/plain; charset=UTF-8') + self.set_header("Content-Type", "text/plain; charset=UTF-8") if include_body: - if model['format'] == 'base64': - b64_bytes = model['content'].encode('ascii') + if model["format"] == "base64": + b64_bytes = model["content"].encode("ascii") self.write(decodebytes(b64_bytes)) - elif model['format'] == 'json': - self.write(json.dumps(model['content'])) + elif model["format"] == "json": + self.write(json.dumps(model["content"])) else: - self.write(model['content']) + self.write(model["content"]) self.flush() diff --git a/jupyter_server/gateway/gateway_client.py b/jupyter_server/gateway/gateway_client.py index 2df23025b2..50a6f74a0a 100644 --- a/jupyter_server/gateway/gateway_client.py +++ b/jupyter_server/gateway/gateway_client.py @@ -1,270 +1,356 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - import json import os - from socket import gaierror + from tornado import web -from tornado.httpclient import AsyncHTTPClient, HTTPError -from traitlets import Unicode, Int, Float, Bool, default, validate, TraitError +from tornado.httpclient import AsyncHTTPClient +from tornado.httpclient import HTTPError +from traitlets import Bool +from traitlets import default +from traitlets import Float +from traitlets import Int +from traitlets import TraitError +from traitlets import Unicode +from traitlets import validate from traitlets.config import SingletonConfigurable class GatewayClient(SingletonConfigurable): """This class manages the configuration. It's its own singleton class so that we - can share these values across all objects. It also contains some helper methods - to build request arguments out of the various config options. + can share these values across all objects. It also contains some helper methods + to build request arguments out of the various config options. 
""" - url = Unicode(default_value=None, allow_none=True, config=True, + url = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The url of the Kernel or Enterprise Gateway server where kernel specifications are defined and kernel management takes place. If defined, this Notebook server acts as a proxy for all kernel management and kernel specification retrieval. (JUPYTER_GATEWAY_URL env var) - """ + """, ) - url_env = 'JUPYTER_GATEWAY_URL' + url_env = "JUPYTER_GATEWAY_URL" - @default('url') + @default("url") def _url_default(self): return os.environ.get(self.url_env) - @validate('url') + @validate("url") def _url_validate(self, proposal): - value = proposal['value'] + value = proposal["value"] # Ensure value, if present, starts with 'http' if value is not None and len(value) > 0: - if not str(value).lower().startswith('http'): + if not str(value).lower().startswith("http"): raise TraitError("GatewayClient url must start with 'http': '%r'" % value) return value - ws_url = Unicode(default_value=None, allow_none=True, config=True, + ws_url = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The websocket url of the Kernel or Enterprise Gateway server. If not provided, this value will correspond to the value of the Gateway url with 'ws' in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var) - """ + """, ) - ws_url_env = 'JUPYTER_GATEWAY_WS_URL' + ws_url_env = "JUPYTER_GATEWAY_WS_URL" - @default('ws_url') + @default("ws_url") def _ws_url_default(self): default_value = os.environ.get(self.ws_url_env) if default_value is None: if self.gateway_enabled: - default_value = self.url.lower().replace('http', 'ws') + default_value = self.url.lower().replace("http", "ws") return default_value - @validate('ws_url') + @validate("ws_url") def _ws_url_validate(self, proposal): - value = proposal['value'] + value = proposal["value"] # Ensure value, if present, starts with 'ws' if value is not None and len(value) > 0: - if not str(value).lower().startswith('ws'): + if not str(value).lower().startswith("ws"): raise TraitError("GatewayClient ws_url must start with 'ws': '%r'" % value) return value - kernels_endpoint_default_value = '/api/kernels' - kernels_endpoint_env = 'JUPYTER_GATEWAY_KERNELS_ENDPOINT' - kernels_endpoint = Unicode(default_value=kernels_endpoint_default_value, config=True, - help="""The gateway API endpoint for accessing kernel resources (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var)""") + kernels_endpoint_default_value = "/api/kernels" + kernels_endpoint_env = "JUPYTER_GATEWAY_KERNELS_ENDPOINT" + kernels_endpoint = Unicode( + default_value=kernels_endpoint_default_value, + config=True, + help="""The gateway API endpoint for accessing kernel resources (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var)""", + ) - @default('kernels_endpoint') + @default("kernels_endpoint") def _kernels_endpoint_default(self): return os.environ.get(self.kernels_endpoint_env, self.kernels_endpoint_default_value) - kernelspecs_endpoint_default_value = '/api/kernelspecs' - kernelspecs_endpoint_env = 'JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT' - kernelspecs_endpoint = Unicode(default_value=kernelspecs_endpoint_default_value, config=True, - help="""The gateway API endpoint for accessing kernelspecs (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var)""") + kernelspecs_endpoint_default_value = "/api/kernelspecs" + kernelspecs_endpoint_env = "JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT" + kernelspecs_endpoint = Unicode( + default_value=kernelspecs_endpoint_default_value, + config=True, + help="""The gateway 
API endpoint for accessing kernelspecs (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var)""", + ) - @default('kernelspecs_endpoint') + @default("kernelspecs_endpoint") def _kernelspecs_endpoint_default(self): - return os.environ.get(self.kernelspecs_endpoint_env, self.kernelspecs_endpoint_default_value) - - kernelspecs_resource_endpoint_default_value = '/kernelspecs' - kernelspecs_resource_endpoint_env = 'JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT' - kernelspecs_resource_endpoint = Unicode(default_value=kernelspecs_resource_endpoint_default_value, config=True, + return os.environ.get( + self.kernelspecs_endpoint_env, self.kernelspecs_endpoint_default_value + ) + + kernelspecs_resource_endpoint_default_value = "/kernelspecs" + kernelspecs_resource_endpoint_env = "JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT" + kernelspecs_resource_endpoint = Unicode( + default_value=kernelspecs_resource_endpoint_default_value, + config=True, help="""The gateway endpoint for accessing kernelspecs resources - (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)""") + (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)""", + ) - @default('kernelspecs_resource_endpoint') + @default("kernelspecs_resource_endpoint") def _kernelspecs_resource_endpoint_default(self): - return os.environ.get(self.kernelspecs_resource_endpoint_env, self.kernelspecs_resource_endpoint_default_value) + return os.environ.get( + self.kernelspecs_resource_endpoint_env, self.kernelspecs_resource_endpoint_default_value + ) connect_timeout_default_value = 40.0 - connect_timeout_env = 'JUPYTER_GATEWAY_CONNECT_TIMEOUT' - connect_timeout = Float(default_value=connect_timeout_default_value, config=True, + connect_timeout_env = "JUPYTER_GATEWAY_CONNECT_TIMEOUT" + connect_timeout = Float( + default_value=connect_timeout_default_value, + config=True, help="""The time allowed for HTTP connection establishment with the Gateway server. - (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)""") + (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)""", + ) - @default('connect_timeout') + @default("connect_timeout") def connect_timeout_default(self): - return float(os.environ.get('JUPYTER_GATEWAY_CONNECT_TIMEOUT', self.connect_timeout_default_value)) + return float( + os.environ.get("JUPYTER_GATEWAY_CONNECT_TIMEOUT", self.connect_timeout_default_value) + ) request_timeout_default_value = 40.0 - request_timeout_env = 'JUPYTER_GATEWAY_REQUEST_TIMEOUT' - request_timeout = Float(default_value=request_timeout_default_value, config=True, - help="""The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT env var)""") + request_timeout_env = "JUPYTER_GATEWAY_REQUEST_TIMEOUT" + request_timeout = Float( + default_value=request_timeout_default_value, + config=True, + help="""The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT env var)""", + ) - @default('request_timeout') + @default("request_timeout") def request_timeout_default(self): - return float(os.environ.get('JUPYTER_GATEWAY_REQUEST_TIMEOUT', self.request_timeout_default_value)) - - client_key = Unicode(default_value=None, allow_none=True, config=True, + return float( + os.environ.get("JUPYTER_GATEWAY_REQUEST_TIMEOUT", self.request_timeout_default_value) + ) + + client_key = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The filename for client SSL key, if any. 
(JUPYTER_GATEWAY_CLIENT_KEY env var) - """ + """, ) - client_key_env = 'JUPYTER_GATEWAY_CLIENT_KEY' + client_key_env = "JUPYTER_GATEWAY_CLIENT_KEY" - @default('client_key') + @default("client_key") def _client_key_default(self): return os.environ.get(self.client_key_env) - client_cert = Unicode(default_value=None, allow_none=True, config=True, + client_cert = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT env var) - """ + """, ) - client_cert_env = 'JUPYTER_GATEWAY_CLIENT_CERT' + client_cert_env = "JUPYTER_GATEWAY_CLIENT_CERT" - @default('client_cert') + @default("client_cert") def _client_cert_default(self): return os.environ.get(self.client_cert_env) - ca_certs = Unicode(default_value=None, allow_none=True, config=True, + ca_certs = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The filename of CA certificates or None to use defaults. (JUPYTER_GATEWAY_CA_CERTS env var) - """ + """, ) - ca_certs_env = 'JUPYTER_GATEWAY_CA_CERTS' + ca_certs_env = "JUPYTER_GATEWAY_CA_CERTS" - @default('ca_certs') + @default("ca_certs") def _ca_certs_default(self): return os.environ.get(self.ca_certs_env) - http_user = Unicode(default_value=None, allow_none=True, config=True, + http_user = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var) - """ + """, ) - http_user_env = 'JUPYTER_GATEWAY_HTTP_USER' + http_user_env = "JUPYTER_GATEWAY_HTTP_USER" - @default('http_user') + @default("http_user") def _http_user_default(self): return os.environ.get(self.http_user_env) - http_pwd = Unicode(default_value=None, allow_none=True, config=True, + http_pwd = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var) - """ + """, ) - http_pwd_env = 'JUPYTER_GATEWAY_HTTP_PWD' + http_pwd_env = "JUPYTER_GATEWAY_HTTP_PWD" - @default('http_pwd') + @default("http_pwd") def _http_pwd_default(self): return os.environ.get(self.http_pwd_env) - headers_default_value = '{}' - headers_env = 'JUPYTER_GATEWAY_HEADERS' - headers = Unicode(default_value=headers_default_value, allow_none=True, config=True, + headers_default_value = "{}" + headers_env = "JUPYTER_GATEWAY_HEADERS" + headers = Unicode( + default_value=headers_default_value, + allow_none=True, + config=True, help="""Additional HTTP headers to pass on the request. This value will be converted to a dict. (JUPYTER_GATEWAY_HEADERS env var) - """ + """, ) - @default('headers') + @default("headers") def _headers_default(self): return os.environ.get(self.headers_env, self.headers_default_value) - auth_token = Unicode(default_value=None, allow_none=True, config=True, + auth_token = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The authorization token used in the HTTP headers. 
The header will be formatted as: { 'Authorization': '{auth_scheme} {auth_token}' } - (JUPYTER_GATEWAY_AUTH_TOKEN env var)""" + (JUPYTER_GATEWAY_AUTH_TOKEN env var)""", ) - auth_token_env = 'JUPYTER_GATEWAY_AUTH_TOKEN' + auth_token_env = "JUPYTER_GATEWAY_AUTH_TOKEN" - @default('auth_token') + @default("auth_token") def _auth_token_default(self): - return os.environ.get(self.auth_token_env, '') + return os.environ.get(self.auth_token_env, "") - auth_scheme = Unicode(default_value=None, allow_none=True, config=True, + auth_scheme = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The auth scheme, added as a prefix to the authorization token used in the HTTP headers. - (JUPYTER_GATEWAY_AUTH_SCHEME env var)""" + (JUPYTER_GATEWAY_AUTH_SCHEME env var)""", ) - auth_scheme_env = 'JUPYTER_GATEWAY_AUTH_SCHEME' + auth_scheme_env = "JUPYTER_GATEWAY_AUTH_SCHEME" - @default('auth_scheme') + @default("auth_scheme") def _auth_scheme_default(self): - return os.environ.get(self.auth_scheme_env, 'token') + return os.environ.get(self.auth_scheme_env, "token") validate_cert_default_value = True - validate_cert_env = 'JUPYTER_GATEWAY_VALIDATE_CERT' - validate_cert = Bool(default_value=validate_cert_default_value, config=True, + validate_cert_env = "JUPYTER_GATEWAY_VALIDATE_CERT" + validate_cert = Bool( + default_value=validate_cert_default_value, + config=True, help="""For HTTPS requests, determines if server's certificate should be validated or not. - (JUPYTER_GATEWAY_VALIDATE_CERT env var)""" + (JUPYTER_GATEWAY_VALIDATE_CERT env var)""", ) - @default('validate_cert') + @default("validate_cert") def validate_cert_default(self): - return bool(os.environ.get(self.validate_cert_env, str(self.validate_cert_default_value)) not in ['no', 'false']) + return bool( + os.environ.get(self.validate_cert_env, str(self.validate_cert_default_value)) + not in ["no", "false"] + ) def __init__(self, **kwargs): super().__init__(**kwargs) self._static_args = {} # initialized on first use - env_whitelist_default_value = '' - env_whitelist_env = 'JUPYTER_GATEWAY_ENV_WHITELIST' - env_whitelist = Unicode(default_value=env_whitelist_default_value, config=True, + env_whitelist_default_value = "" + env_whitelist_env = "JUPYTER_GATEWAY_ENV_WHITELIST" + env_whitelist = Unicode( + default_value=env_whitelist_default_value, + config=True, help="""A comma-separated list of environment variable names that will be included, along with their values, in the kernel startup request. The corresponding `env_whitelist` configuration value must also be set on the Gateway server - since that configuration value indicates which - environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var)""") + environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var)""", + ) - @default('env_whitelist') + @default("env_whitelist") def _env_whitelist_default(self): return os.environ.get(self.env_whitelist_env, self.env_whitelist_default_value) gateway_retry_interval_default_value = 1.0 - gateway_retry_interval_env = 'JUPYTER_GATEWAY_RETRY_INTERVAL' - gateway_retry_interval = Float(default_value=gateway_retry_interval_default_value, config=True, + gateway_retry_interval_env = "JUPYTER_GATEWAY_RETRY_INTERVAL" + gateway_retry_interval = Float( + default_value=gateway_retry_interval_default_value, + config=True, help="""The time allowed for HTTP reconnection with the Gateway server for the first time. 
            Subsequent retries wait JUPYTER_GATEWAY_RETRY_INTERVAL multiplied by a factor
            of two per retry, but never longer than JUPYTER_GATEWAY_RETRY_INTERVAL_MAX.
-            (JUPYTER_GATEWAY_RETRY_INTERVAL env var)""")
+            (JUPYTER_GATEWAY_RETRY_INTERVAL env var)""",
+    )

-    @default('gateway_retry_interval')
+    @default("gateway_retry_interval")
     def gateway_retry_interval_default(self):
-        return float(os.environ.get('JUPYTER_GATEWAY_RETRY_INTERVAL', self.gateway_retry_interval_default_value))
+        return float(
+            os.environ.get(
+                "JUPYTER_GATEWAY_RETRY_INTERVAL", self.gateway_retry_interval_default_value
+            )
+        )

     gateway_retry_interval_max_default_value = 30.0
-    gateway_retry_interval_max_env = 'JUPYTER_GATEWAY_RETRY_INTERVAL_MAX'
-    gateway_retry_interval_max = Float(default_value=gateway_retry_interval_max_default_value, config=True,
+    gateway_retry_interval_max_env = "JUPYTER_GATEWAY_RETRY_INTERVAL_MAX"
+    gateway_retry_interval_max = Float(
+        default_value=gateway_retry_interval_max_default_value,
+        config=True,
         help="""The maximum time allowed for HTTP reconnection retry with the Gateway server.
-            (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var)""")
+            (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var)""",
+    )

-    @default('gateway_retry_interval_max')
+    @default("gateway_retry_interval_max")
     def gateway_retry_interval_max_default(self):
-        return float(os.environ.get('JUPYTER_GATEWAY_RETRY_INTERVAL_MAX', self.gateway_retry_interval_max_default_value))
+        return float(
+            os.environ.get(
+                "JUPYTER_GATEWAY_RETRY_INTERVAL_MAX", self.gateway_retry_interval_max_default_value
+            )
+        )

     gateway_retry_max_default_value = 5
-    gateway_retry_max_env = 'JUPYTER_GATEWAY_RETRY_MAX'
-    gateway_retry_max = Int(default_value=gateway_retry_max_default_value, config=True,
+    gateway_retry_max_env = "JUPYTER_GATEWAY_RETRY_MAX"
+    gateway_retry_max = Int(
+        default_value=gateway_retry_max_default_value,
+        config=True,
         help="""The maximum retries allowed for HTTP reconnection with the Gateway server.
-            (JUPYTER_GATEWAY_RETRY_MAX env var)""")
+            (JUPYTER_GATEWAY_RETRY_MAX env var)""",
+    )

-    @default('gateway_retry_max')
+    @default("gateway_retry_max")
     def gateway_retry_max_default(self):
-        return int(os.environ.get('JUPYTER_GATEWAY_RETRY_MAX', self.gateway_retry_max_default_value))
+        return int(
+            os.environ.get("JUPYTER_GATEWAY_RETRY_MAX", self.gateway_retry_max_default_value)
+        )

     @property
     def gateway_enabled(self):
         return bool(self.url is not None and len(self.url) > 0)

     # Ensure KERNEL_LAUNCH_TIMEOUT has a default value.
-    KERNEL_LAUNCH_TIMEOUT = int(os.environ.get('KERNEL_LAUNCH_TIMEOUT', 40))
+    KERNEL_LAUNCH_TIMEOUT = int(os.environ.get("KERNEL_LAUNCH_TIMEOUT", 40))

     def init_static_args(self):
         """Initialize arguments used on every request. Since these are static values, we'll
@@ -278,29 +364,29 @@ def init_static_args(self):
         elif self.request_timeout > float(GatewayClient.KERNEL_LAUNCH_TIMEOUT):
             GatewayClient.KERNEL_LAUNCH_TIMEOUT = int(self.request_timeout)
         # Ensure any adjustments are reflected in env.
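         # (From the elif above: a request_timeout larger than
         # KERNEL_LAUNCH_TIMEOUT raises the launch timeout to match, e.g.
         # request_timeout=60.0 lifts the default of 40 to 60; the env update
         # below keeps kernel provisioning in sync with that value.)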
- os.environ['KERNEL_LAUNCH_TIMEOUT'] = str(GatewayClient.KERNEL_LAUNCH_TIMEOUT) - - self._static_args['headers'] = json.loads(self.headers) - if 'Authorization' not in self._static_args['headers'].keys(): - self._static_args['headers'].update({ - 'Authorization': '{} {}'.format(self.auth_scheme, self.auth_token) - }) - self._static_args['connect_timeout'] = self.connect_timeout - self._static_args['request_timeout'] = self.request_timeout - self._static_args['validate_cert'] = self.validate_cert + os.environ["KERNEL_LAUNCH_TIMEOUT"] = str(GatewayClient.KERNEL_LAUNCH_TIMEOUT) + + self._static_args["headers"] = json.loads(self.headers) + if "Authorization" not in self._static_args["headers"].keys(): + self._static_args["headers"].update( + {"Authorization": "{} {}".format(self.auth_scheme, self.auth_token)} + ) + self._static_args["connect_timeout"] = self.connect_timeout + self._static_args["request_timeout"] = self.request_timeout + self._static_args["validate_cert"] = self.validate_cert if self.client_cert: - self._static_args['client_cert'] = self.client_cert - self._static_args['client_key'] = self.client_key + self._static_args["client_cert"] = self.client_cert + self._static_args["client_key"] = self.client_key if self.ca_certs: - self._static_args['ca_certs'] = self.ca_certs + self._static_args["ca_certs"] = self.ca_certs if self.http_user: - self._static_args['auth_username'] = self.http_user + self._static_args["auth_username"] = self.http_user if self.http_pwd: - self._static_args['auth_password'] = self.http_pwd + self._static_args["auth_password"] = self.http_pwd def load_connection_args(self, **kwargs): """Merges the static args relative to the connection, with the given keyword arguments. If statics - have yet to be initialized, we'll do that here. + have yet to be initialized, we'll do that here. """ if len(self._static_args) == 0: @@ -321,17 +407,29 @@ async def gateway_request(endpoint, **kwargs): # NOTE: We do this here since this handler is called during the Notebook's startup and subsequent refreshes # of the tree view. except ConnectionRefusedError as e: - raise web.HTTPError(503, "Connection refused from Gateway server url '{}'. " - "Check to be sure the Gateway instance is running.".format(GatewayClient.instance().url)) from e + raise web.HTTPError( + 503, + "Connection refused from Gateway server url '{}'. " + "Check to be sure the Gateway instance is running.".format( + GatewayClient.instance().url + ), + ) from e except HTTPError as e: # This can occur if the host is valid (e.g., foo.com) but there's nothing there. - raise web.HTTPError(e.code, "Error attempting to connect to Gateway server url '{}'. " - "Ensure gateway url is valid and the Gateway instance is running.". - format(GatewayClient.instance().url)) from e + raise web.HTTPError( + e.code, + "Error attempting to connect to Gateway server url '{}'. " + "Ensure gateway url is valid and the Gateway instance is running.".format( + GatewayClient.instance().url + ), + ) from e except gaierror as e: - raise web.HTTPError(404, "The Gateway server specified in the gateway_url '{}' doesn't appear to be valid. " - "Ensure gateway url is valid and the Gateway instance is running.". - format(GatewayClient.instance().url)) from e + raise web.HTTPError( + 404, + "The Gateway server specified in the gateway_url '{}' doesn't appear to be valid. 
" + "Ensure gateway url is valid and the Gateway instance is running.".format( + GatewayClient.instance().url + ), + ) from e return response - diff --git a/jupyter_server/gateway/handlers.py b/jupyter_server/gateway/handlers.py index 453f3d4ba6..274d5a7084 100644 --- a/jupyter_server/gateway/handlers.py +++ b/jupyter_server/gateway/handlers.py @@ -1,30 +1,32 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - -import os +import asyncio import logging import mimetypes +import os import random -import asyncio - -from ..base.handlers import APIHandler, JupyterHandler -from ..utils import url_path_join +from ipython_genutils.py3compat import cast_unicode +from jupyter_client.session import Session from tornado import web from tornado.concurrent import Future -from tornado.ioloop import IOLoop, PeriodicCallback -from tornado.websocket import WebSocketHandler, websocket_connect +from tornado.escape import json_decode +from tornado.escape import url_escape +from tornado.escape import utf8 from tornado.httpclient import HTTPRequest -from tornado.escape import url_escape, json_decode, utf8 - -from ipython_genutils.py3compat import cast_unicode -from jupyter_client.session import Session +from tornado.ioloop import IOLoop +from tornado.ioloop import PeriodicCallback +from tornado.websocket import websocket_connect +from tornado.websocket import WebSocketHandler from traitlets.config.configurable import LoggingConfigurable +from ..base.handlers import APIHandler +from ..base.handlers import JupyterHandler +from ..utils import url_path_join from .managers import GatewayClient # Keepalive ping interval (default: 30 seconds) -GATEWAY_WS_PING_INTERVAL_SECS = int(os.getenv('GATEWAY_WS_PING_INTERVAL_SECS', 30)) +GATEWAY_WS_PING_INTERVAL_SECS = int(os.getenv("GATEWAY_WS_PING_INTERVAL_SECS", 30)) class WebSocketChannelsHandler(WebSocketHandler, JupyterHandler): @@ -56,8 +58,8 @@ def authenticate(self): self.log.warning("Couldn't authenticate WebSocket connection") raise web.HTTPError(403) - if self.get_argument('session_id', False): - self.session.session = cast_unicode(self.get_argument('session_id')) + if self.get_argument("session_id", False): + self.session.session = cast_unicode(self.get_argument("session_id")) else: self.log.warning("No session ID specified") @@ -68,7 +70,7 @@ def initialize(self): async def get(self, kernel_id, *args, **kwargs): self.authenticate() - self.kernel_id = cast_unicode(kernel_id, 'ascii') + self.kernel_id = cast_unicode(kernel_id, "ascii") await super(WebSocketChannelsHandler, self).get(kernel_id=kernel_id, *args, **kwargs) def send_ping(self): @@ -76,7 +78,7 @@ def send_ping(self): self.ping_callback.stop() return - self.ping(b'') + self.ping(b"") def open(self, kernel_id, *args, **kwargs): """Handle web socket connection open to notebook server and delegate to gateway web socket handler """ @@ -86,7 +88,7 @@ def open(self, kernel_id, *args, **kwargs): self.gateway.on_open( kernel_id=kernel_id, message_callback=self.write_message, - compression_options=self.get_compression_options() + compression_options=self.get_compression_options(), ) def on_message(self, message): @@ -101,7 +103,11 @@ def write_message(self, message, binary=False): super(WebSocketChannelsHandler, self).write_message(message, binary=binary) elif self.log.isEnabledFor(logging.DEBUG): msg_summary = WebSocketChannelsHandler._get_message_summary(json_decode(utf8(message))) - self.log.debug("Notebook client closed websocket connection - message dropped: 
{}".format(msg_summary)) + self.log.debug( + "Notebook client closed websocket connection - message dropped: {}".format( + msg_summary + ) + ) def on_close(self): self.log.debug("Closing websocket connection %s", self.request.path) @@ -111,19 +117,23 @@ def on_close(self): @staticmethod def _get_message_summary(message): summary = [] - message_type = message['msg_type'] - summary.append('type: {}'.format(message_type)) - - if message_type == 'status': - summary.append(', state: {}'.format(message['content']['execution_state'])) - elif message_type == 'error': - summary.append(', {}:{}:{}'.format(message['content']['ename'], - message['content']['evalue'], - message['content']['traceback'])) + message_type = message["msg_type"] + summary.append("type: {}".format(message_type)) + + if message_type == "status": + summary.append(", state: {}".format(message["content"]["execution_state"])) + elif message_type == "error": + summary.append( + ", {}:{}:{}".format( + message["content"]["ename"], + message["content"]["evalue"], + message["content"]["traceback"], + ) + ) else: - summary.append(', ...') # don't display potentially sensitive data + summary.append(", ...") # don't display potentially sensitive data - return ''.join(summary) + return "".join(summary) class GatewayWebSocketClient(LoggingConfigurable): @@ -143,9 +153,11 @@ async def _connect(self, kernel_id, message_callback): self.kernel_id = kernel_id ws_url = url_path_join( GatewayClient.instance().ws_url, - GatewayClient.instance().kernels_endpoint, url_escape(kernel_id), 'channels' + GatewayClient.instance().kernels_endpoint, + url_escape(kernel_id), + "channels", ) - self.log.info('Connecting to {}'.format(ws_url)) + self.log.info("Connecting to {}".format(ws_url)) kwargs = {} kwargs = GatewayClient.instance().load_connection_args(**kwargs) @@ -154,20 +166,22 @@ async def _connect(self, kernel_id, message_callback): self.ws_future.add_done_callback(self._connection_done) loop = IOLoop.current() - loop.add_future( - self.ws_future, - lambda future: self._read_messages(message_callback) - ) + loop.add_future(self.ws_future, lambda future: self._read_messages(message_callback)) def _connection_done(self, fut): - if not self.disconnected and fut.exception() is None: # prevent concurrent.futures._base.CancelledError + if ( + not self.disconnected and fut.exception() is None + ): # prevent concurrent.futures._base.CancelledError self.ws = fut.result() self.retry = 0 self.log.debug("Connection is ready: ws: {}".format(self.ws)) else: - self.log.warning("Websocket connection has been closed via client disconnect or due to error. " - "Kernel with ID '{}' may not be terminated on GatewayClient: {}". - format(self.kernel_id, GatewayClient.instance().url)) + self.log.warning( + "Websocket connection has been closed via client disconnect or due to error. " + "Kernel with ID '{}' may not be terminated on GatewayClient: {}".format( + self.kernel_id, GatewayClient.instance().url + ) + ) def _disconnect(self): self.disconnected = True @@ -177,7 +191,9 @@ def _disconnect(self): elif not self.ws_future.done(): # Cancel pending connection. 
Since future.cancel() is a noop on tornado, we'll track cancellation locally self.ws_future.cancel() - self.log.debug("_disconnect: future cancelled, disconnected: {}".format(self.disconnected)) + self.log.debug( + "_disconnect: future cancelled, disconnected: {}".format(self.disconnected) + ) async def _read_messages(self, callback): """Read messages from gateway server.""" @@ -187,23 +203,37 @@ async def _read_messages(self, callback): try: message = await self.ws.read_message() except Exception as e: - self.log.error("Exception reading message from websocket: {}".format(e)) # , exc_info=True) + self.log.error( + "Exception reading message from websocket: {}".format(e) + ) # , exc_info=True) if message is None: if not self.disconnected: self.log.warning("Lost connection to Gateway: {}".format(self.kernel_id)) break - callback(message) # pass back to notebook client (see self.on_open and WebSocketChannelsHandler.open) + callback( + message + ) # pass back to notebook client (see self.on_open and WebSocketChannelsHandler.open) else: # ws cancelled - stop reading break # NOTE(esevan): if websocket is not disconnected by client, try to reconnect. if not self.disconnected and self.retry < GatewayClient.instance().gateway_retry_max: jitter = random.randint(10, 100) * 0.01 - retry_interval = min(GatewayClient.instance().gateway_retry_interval * (2 ** self.retry), - GatewayClient.instance().gateway_retry_interval_max) + jitter + retry_interval = ( + min( + GatewayClient.instance().gateway_retry_interval * (2 ** self.retry), + GatewayClient.instance().gateway_retry_interval_max, + ) + + jitter + ) self.retry += 1 - self.log.info("Attempting to re-establish the connection to Gateway in %s secs (%s/%s): %s", - retry_interval, self.retry, GatewayClient.instance().gateway_retry_max, self.kernel_id) + self.log.info( + "Attempting to re-establish the connection to Gateway in %s secs (%s/%s): %s", + retry_interval, + self.retry, + GatewayClient.instance().gateway_retry_max, + self.kernel_id, + ) await asyncio.sleep(retry_interval) loop = IOLoop.current() loop.spawn_callback(self._connect, self.kernel_id, callback) @@ -217,10 +247,7 @@ def on_message(self, message): """Send message to gateway server.""" if self.ws is None: loop = IOLoop.current() - loop.add_future( - self.ws_future, - lambda future: self._write_message(message) - ) + loop.add_future(self.ws_future, lambda future: self._write_message(message)) else: self._write_message(message) @@ -230,7 +257,9 @@ def _write_message(self, message): if not self.disconnected and self.ws is not None: self.ws.write_message(message) except Exception as e: - self.log.error("Exception writing message to websocket: {}".format(e)) # , exc_info=True) + self.log.error( + "Exception writing message to websocket: {}".format(e) + ) # , exc_info=True) def on_close(self): """Web socket closed event.""" @@ -245,8 +274,10 @@ async def get(self, kernel_name, path, include_body=True): ksm = self.kernel_spec_manager kernel_spec_res = await ksm.get_kernel_spec_resource(kernel_name, path) if kernel_spec_res is None: - self.log.warning("Kernelspec resource '{}' for '{}' not found. Gateway may not support" - " resource serving.".format(path, kernel_name)) + self.log.warning( + "Kernelspec resource '{}' for '{}' not found. 
Gateway may not support" + " resource serving.".format(path, kernel_name) + ) else: self.set_header("Content-Type", mimetypes.guess_type(path)[0]) self.finish(kernel_spec_res) diff --git a/jupyter_server/gateway/managers.py b/jupyter_server/gateway/managers.py index 0fb1cb2234..b688c59b69 100644 --- a/jupyter_server/gateway/managers.py +++ b/jupyter_server/gateway/managers.py @@ -1,49 +1,57 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - import datetime import json import os -import websocket +from logging import Logger +from queue import Queue +from threading import Thread +from typing import Dict +import websocket from jupyter_client.asynchronous.client import AsyncKernelClient from jupyter_client.clientabc import KernelClientABC from jupyter_client.kernelspec import KernelSpecManager from jupyter_client.manager import AsyncKernelManager from jupyter_client.managerabc import KernelManagerABC - -from logging import Logger -from queue import Queue -from threading import Thread from tornado import web -from tornado.escape import json_encode, json_decode, url_escape, utf8 -from traitlets import Instance, DottedObjectName, Type, default -from typing import Dict +from tornado.escape import json_decode +from tornado.escape import json_encode +from tornado.escape import url_escape +from tornado.escape import utf8 +from traitlets import default +from traitlets import DottedObjectName +from traitlets import Instance +from traitlets import Type -from .gateway_client import GatewayClient, gateway_request +from .._tz import UTC from ..services.kernels.kernelmanager import AsyncMappingKernelManager from ..services.sessions.sessionmanager import SessionManager -from ..utils import url_path_join, ensure_async -from .._tz import UTC +from ..utils import ensure_async +from ..utils import url_path_join +from .gateway_client import gateway_request +from .gateway_client import GatewayClient class GatewayMappingKernelManager(AsyncMappingKernelManager): """Kernel manager that supports remote kernels hosted by Jupyter Kernel or Enterprise Gateway.""" # We'll maintain our own set of kernel ids - _kernels: Dict[str, 'GatewayKernelManager'] = {} + _kernels: Dict[str, "GatewayKernelManager"] = {} - @default('kernel_manager_class') + @default("kernel_manager_class") def _default_kernel_manager_class(self): return "jupyter_server.gateway.managers.GatewayKernelManager" - @default('shared_context') + @default("shared_context") def _default_shared_context(self): return False # no need to share zmq contexts def __init__(self, **kwargs): super().__init__(**kwargs) - self.kernels_url = url_path_join(GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint) + self.kernels_url = url_path_join( + GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint + ) def remove_kernel(self, kernel_id): """Complete override since we want to be more tolerant of missing keys """ @@ -69,7 +77,7 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): if kernel_id is None: if path is not None: - kwargs['cwd'] = self.cwd_for_path(path) + kwargs["cwd"] = self.cwd_for_path(path) km = self.kernel_manager_factory(parent=self, log=self.log) await km.start_kernel(**kwargs) @@ -104,13 +112,13 @@ async def list_kernels(self, **kwargs): the kernels we're managing. 
""" self.log.debug(f"Request list kernels: {self.kernels_url}") - response = await gateway_request(self.kernels_url, method='GET') + response = await gateway_request(self.kernels_url, method="GET") kernels = json_decode(response.body) # Refresh our models to those we know about, and filter # the return value with only our kernels. kernel_models = {} for model in kernels: - kid = model['id'] + kid = model["id"] if kid in self._kernels: await self._kernels[kid].refresh_model(model) kernel_models[kid] = model @@ -176,21 +184,22 @@ async def cull_kernels(self): class GatewayKernelSpecManager(KernelSpecManager): - def __init__(self, **kwargs): super().__init__(**kwargs) - base_endpoint = url_path_join(GatewayClient.instance().url, - GatewayClient.instance().kernelspecs_endpoint) + base_endpoint = url_path_join( + GatewayClient.instance().url, GatewayClient.instance().kernelspecs_endpoint + ) self.base_endpoint = GatewayKernelSpecManager._get_endpoint_for_user_filter(base_endpoint) - self.base_resource_endpoint = url_path_join(GatewayClient.instance().url, - GatewayClient.instance().kernelspecs_resource_endpoint) + self.base_resource_endpoint = url_path_join( + GatewayClient.instance().url, GatewayClient.instance().kernelspecs_resource_endpoint + ) @staticmethod def _get_endpoint_for_user_filter(default_endpoint): - kernel_user = os.environ.get('KERNEL_USERNAME') + kernel_user = os.environ.get("KERNEL_USERNAME") if kernel_user: - return '?user='.join([default_endpoint, kernel_user]) + return "?user=".join([default_endpoint, kernel_user]) return default_endpoint def _get_kernelspecs_endpoint_url(self, kernel_name=None): @@ -213,20 +222,22 @@ async def get_all_specs(self): # caller of this method will still return this server's value until # the next fetch of kernelspecs - at which time they'll match. km = self.parent.kernel_manager - remote_default_kernel_name = fetched_kspecs.get('default') + remote_default_kernel_name = fetched_kspecs.get("default") if remote_default_kernel_name != km.default_kernel_name: - self.log.info(f"Default kernel name on Gateway server ({remote_default_kernel_name}) differs from " - f"Notebook server ({km.default_kernel_name}). Updating to Gateway server's value.") + self.log.info( + f"Default kernel name on Gateway server ({remote_default_kernel_name}) differs from " + f"Notebook server ({km.default_kernel_name}). Updating to Gateway server's value." 
+ ) km.default_kernel_name = remote_default_kernel_name - remote_kspecs = fetched_kspecs.get('kernelspecs') + remote_kspecs = fetched_kspecs.get("kernelspecs") return remote_kspecs async def list_kernel_specs(self): """Get a list of kernel specs.""" kernel_spec_url = self._get_kernelspecs_endpoint_url() self.log.debug(f"Request list kernel specs at: {kernel_spec_url}") - response = await gateway_request(kernel_spec_url, method='GET') + response = await gateway_request(kernel_spec_url, method="GET") kernel_specs = json_decode(response.body) return kernel_specs @@ -241,14 +252,15 @@ async def get_kernel_spec(self, kernel_name, **kwargs): kernel_spec_url = self._get_kernelspecs_endpoint_url(kernel_name=str(kernel_name)) self.log.debug(f"Request kernel spec at: {kernel_spec_url}") try: - response = await gateway_request(kernel_spec_url, method='GET') + response = await gateway_request(kernel_spec_url, method="GET") except web.HTTPError as error: if error.status_code == 404: # Convert not found to KeyError since that's what the Notebook handler expects # message is not used, but might as well make it useful for troubleshooting raise KeyError( - 'kernelspec {kernel_name} not found on Gateway server at: {gateway_url}'. - format(kernel_name=kernel_name, gateway_url=GatewayClient.instance().url) + "kernelspec {kernel_name} not found on Gateway server at: {gateway_url}".format( + kernel_name=kernel_name, gateway_url=GatewayClient.instance().url + ) ) from error else: raise @@ -267,10 +279,12 @@ async def get_kernel_spec_resource(self, kernel_name, path): path : str The name of the desired resource """ - kernel_spec_resource_url = url_path_join(self.base_resource_endpoint, str(kernel_name), str(path)) + kernel_spec_resource_url = url_path_join( + self.base_resource_endpoint, str(kernel_name), str(path) + ) self.log.debug(f"Request kernel spec resource '{path}' at: {kernel_spec_resource_url}") try: - response = await gateway_request(kernel_spec_resource_url, method='GET') + response = await gateway_request(kernel_spec_resource_url, method="GET") except web.HTTPError as error: if error.status_code == 404: kernel_spec_resource = None @@ -282,7 +296,7 @@ async def get_kernel_spec_resource(self, kernel_name, path): class GatewaySessionManager(SessionManager): - kernel_manager = Instance('jupyter_server.gateway.managers.GatewayMappingKernelManager') + kernel_manager = Instance("jupyter_server.gateway.managers.GatewayMappingKernelManager") async def kernel_culled(self, kernel_id): """Checks if the kernel is still considered alive and returns true if its not found. 
""" @@ -304,13 +318,15 @@ class GatewayKernelManager(AsyncKernelManager): kernel_id = None kernel = None - @default('cache_ports') + @default("cache_ports") def _default_cache_ports(self): return False # no need to cache ports here def __init__(self, **kwargs): super().__init__(**kwargs) - self.kernels_url = url_path_join(GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint) + self.kernels_url = url_path_join( + GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint + ) self.kernel_url = self.kernel = self.kernel_id = None # simulate busy/activity markers: self.execution_state = self.last_activity = None @@ -320,8 +336,8 @@ def has_kernel(self): """Has a kernel been started that we are managing.""" return self.kernel is not None - client_class = DottedObjectName('jupyter_server.gateway.managers.GatewayKernelClient') - client_factory = Type(klass='jupyter_server.gateway.managers.GatewayKernelClient') + client_class = DottedObjectName("jupyter_server.gateway.managers.GatewayKernelClient") + client_factory = Type(klass="jupyter_server.gateway.managers.GatewayKernelClient") # -------------------------------------------------------------------------- # create a Client connected to our Kernel @@ -331,11 +347,13 @@ def client(self, **kwargs): """Create a client configured to connect to our kernel""" kw = {} kw.update(self.get_connection_info(session=True)) - kw.update(dict( - connection_file=self.connection_file, - parent=self, - )) - kw['kernel_id'] = self.kernel_id + kw.update( + dict( + connection_file=self.connection_file, + parent=self, + ) + ) + kw["kernel_id"] = self.kernel_id # add kwargs last, for manual overrides kw.update(kwargs) @@ -353,7 +371,7 @@ async def refresh_model(self, model=None): if model is None: self.log.debug("Request kernel at: %s" % self.kernel_url) try: - response = await gateway_request(self.kernel_url, method='GET') + response = await gateway_request(self.kernel_url, method="GET") except web.HTTPError as error: if error.status_code == 404: self.log.warning("Kernel not found at: %s" % self.kernel_url) @@ -366,14 +384,15 @@ async def refresh_model(self, model=None): if model: # Update activity markers self.last_activity = datetime.datetime.strptime( - model['last_activity'], '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=UTC) - self.execution_state = model['execution_state'] + model["last_activity"], "%Y-%m-%dT%H:%M:%S.%fZ" + ).replace(tzinfo=UTC) + self.execution_state = model["execution_state"] if isinstance(self.parent, AsyncMappingKernelManager): # Update connections only if there's a mapping kernel manager parent for # this kernel manager. The current kernel manager instance may not have # an parent instance if, say, a server extension is using another application # (e.g., papermill) that uses a KernelManager instance directly. - self.parent._kernel_connections[self.kernel_id] = int(model['connections']) + self.parent._kernel_connections[self.kernel_id] = int(model["connections"]) self.kernel = model return model @@ -391,32 +410,37 @@ async def start_kernel(self, **kwargs): keyword arguments that are passed down to build the kernel_cmd and launching the kernel (e.g. Popen kwargs). """ - kernel_id = kwargs.get('kernel_id') + kernel_id = kwargs.get("kernel_id") if kernel_id is None: - kernel_name = kwargs.get('kernel_name', 'python3') + kernel_name = kwargs.get("kernel_name", "python3") self.log.debug("Request new kernel at: %s" % self.kernels_url) # Let KERNEL_USERNAME take precedent over http_user config option. 
- if os.environ.get('KERNEL_USERNAME') is None and GatewayClient.instance().http_user: - os.environ['KERNEL_USERNAME'] = GatewayClient.instance().http_user + if os.environ.get("KERNEL_USERNAME") is None and GatewayClient.instance().http_user: + os.environ["KERNEL_USERNAME"] = GatewayClient.instance().http_user - kernel_env = {k: v for (k, v) in dict(os.environ).items() if k.startswith('KERNEL_') or - k in GatewayClient.instance().env_whitelist.split(",")} + kernel_env = { + k: v + for (k, v) in dict(os.environ).items() + if k.startswith("KERNEL_") or k in GatewayClient.instance().env_whitelist.split(",") + } # Add any env entries in this request - kernel_env.update(kwargs.get('env', {})) + kernel_env.update(kwargs.get("env", {})) # Convey the full path to where this notebook file is located. - if kwargs.get('cwd') is not None and kernel_env.get('KERNEL_WORKING_DIR') is None: - kernel_env['KERNEL_WORKING_DIR'] = kwargs['cwd'] + if kwargs.get("cwd") is not None and kernel_env.get("KERNEL_WORKING_DIR") is None: + kernel_env["KERNEL_WORKING_DIR"] = kwargs["cwd"] - json_body = json_encode({'name': kernel_name, 'env': kernel_env}) + json_body = json_encode({"name": kernel_name, "env": kernel_env}) - response = await gateway_request(self.kernels_url, method='POST', body=json_body) + response = await gateway_request(self.kernels_url, method="POST", body=json_body) self.kernel = json_decode(response.body) - self.kernel_id = self.kernel['id'] - self.log.info("GatewayKernelManager started kernel: {}, args: {}".format(self.kernel_id, kwargs)) + self.kernel_id = self.kernel["id"] + self.log.info( + "GatewayKernelManager started kernel: {}, args: {}".format(self.kernel_id, kwargs) + ) else: self.kernel_id = kernel_id self.kernel = await self.refresh_model() @@ -429,23 +453,23 @@ async def shutdown_kernel(self, now=False, restart=False): if self.has_kernel: self.log.debug("Request shutdown kernel at: %s", self.kernel_url) - response = await gateway_request(self.kernel_url, method='DELETE') + response = await gateway_request(self.kernel_url, method="DELETE") self.log.debug("Shutdown kernel response: %d %s", response.code, response.reason) async def restart_kernel(self, **kw): """Restarts a kernel via HTTP. """ if self.has_kernel: - kernel_url = self.kernel_url + '/restart' + kernel_url = self.kernel_url + "/restart" self.log.debug("Request restart kernel at: %s", kernel_url) - response = await gateway_request(kernel_url, method='POST', body=json_encode({})) + response = await gateway_request(kernel_url, method="POST", body=json_encode({})) self.log.debug("Restart kernel response: %d %s", response.code, response.reason) async def interrupt_kernel(self): """Interrupts the kernel via an HTTP request. """ if self.has_kernel: - kernel_url = self.kernel_url + '/interrupt' + kernel_url = self.kernel_url + "/interrupt" self.log.debug("Request interrupt kernel at: %s", kernel_url) - response = await gateway_request(kernel_url, method='POST', body=json_encode({})) + response = await gateway_request(kernel_url, method="POST", body=json_encode({})) self.log.debug("Interrupt kernel response: %d %s", response.code, response.reason) async def is_alive(self): @@ -476,17 +500,23 @@ def __init__(self, channel_name: str, channel_socket: websocket.WebSocket, log: self.log = log async def get_msg(self, *args, **kwargs) -> dict: - timeout = kwargs.get('timeout', 1) + timeout = kwargs.get("timeout", 1) msg = self.get(timeout=timeout) - self.log.debug("Received message on channel: {}, msg_id: {}, msg_type: {}". 
- format(self.channel_name, msg['msg_id'], msg['msg_type'] if msg else 'null')) + self.log.debug( + "Received message on channel: {}, msg_id: {}, msg_type: {}".format( + self.channel_name, msg["msg_id"], msg["msg_type"] if msg else "null" + ) + ) self.task_done() return msg def send(self, msg: dict) -> None: message = json.dumps(msg, default=ChannelQueue.serialize_datetime).replace(" None: msgs = [] while self.qsize(): msg = self.get_nowait() - if msg['msg_type'] != 'status': - msgs.append(msg['msg_type']) - if self.channel_name == 'iopub' and 'shutdown_reply' in msgs: + if msg["msg_type"] != "status": + msgs.append(msg["msg_type"]) + if self.channel_name == "iopub" and "shutdown_reply" in msgs: return if len(msgs): - self.log.warning("Stopping channel '{}' with {} unprocessed non-status messages: {}.". - format(self.channel_name, len(msgs), msgs)) + self.log.warning( + "Stopping channel '{}' with {} unprocessed non-status messages: {}.".format( + self.channel_name, len(msgs), msgs + ) + ) def is_alive(self) -> bool: return self.channel_socket is not None class HBChannelQueue(ChannelQueue): - def is_beating(self) -> bool: # Just use the is_alive status for now return self.is_alive() @@ -549,7 +581,7 @@ class GatewayKernelClient(AsyncKernelClient): def __init__(self, **kwargs): super().__init__(**kwargs) - self.kernel_id = kwargs['kernel_id'] + self.kernel_id = kwargs["kernel_id"] self.channel_socket = None self.response_router = None @@ -567,21 +599,28 @@ async def start_channels(self, shell=True, iopub=True, stdin=True, hb=True, cont ws_url = url_path_join( GatewayClient.instance().ws_url, - GatewayClient.instance().kernels_endpoint, url_escape(self.kernel_id), 'channels') + GatewayClient.instance().kernels_endpoint, + url_escape(self.kernel_id), + "channels", + ) # Gather cert info in case where ssl is desired... ssl_options = dict() - ssl_options['ca_certs'] = GatewayClient.instance().ca_certs - ssl_options['certfile'] = GatewayClient.instance().client_cert - ssl_options['keyfile'] = GatewayClient.instance().client_key - - self.channel_socket = websocket.create_connection(ws_url, - timeout=GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT, - enable_multithread=True, - sslopt=ssl_options) + ssl_options["ca_certs"] = GatewayClient.instance().ca_certs + ssl_options["certfile"] = GatewayClient.instance().client_cert + ssl_options["keyfile"] = GatewayClient.instance().client_key + + self.channel_socket = websocket.create_connection( + ws_url, + timeout=GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT, + enable_multithread=True, + sslopt=ssl_options, + ) self.response_router = Thread(target=self._route_responses) self.response_router.start() - await ensure_async(super().start_channels(shell=shell, iopub=iopub, stdin=stdin, hb=hb, control=control)) + await ensure_async( + super().start_channels(shell=shell, iopub=iopub, stdin=stdin, hb=hb, control=control) + ) def stop_channels(self): """Stops all the running channels for this kernel. 
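Editor's note: `start_channels` above opens a single websocket to the gateway and starts a `_route_responses` thread that fans incoming messages out to per-channel queues. A self-contained sketch of that fan-out pattern, with a plain `queue.Queue` reduced from `ChannelQueue` and a list of canned JSON strings standing in for the websocket:

```python
import json
import queue
import threading

# One reader thread pulls JSON messages off a single connection and routes
# each one to a per-channel queue keyed by msg["channel"], mirroring
# GatewayKernelClient._route_responses(). `messages` fakes the websocket.
channel_queues = {name: queue.Queue() for name in ("shell", "iopub", "stdin", "hb", "control")}
messages = [
    '{"channel": "iopub", "msg_type": "status"}',
    '{"channel": "shell", "msg_type": "execute_reply"}',
]

def route_responses():
    for raw in messages:  # real code: raw = channel_socket.recv()
        msg = json.loads(raw)
        channel_queues[msg["channel"]].put_nowait(msg)

router = threading.Thread(target=route_responses)
router.start()
router.join()
print(channel_queues["shell"].get_nowait()["msg_type"])  # execute_reply
```

The single-socket/多-queue design lets the blocking `get_msg` calls on each channel stay independent while only one network connection is held open.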
@@ -607,8 +646,8 @@ def shell_channel(self): """Get the shell channel object for this kernel.""" if self._shell_channel is None: self.log.debug("creating shell channel queue") - self._shell_channel = ChannelQueue('shell', self.channel_socket, self.log) - self._channel_queues['shell'] = self._shell_channel + self._shell_channel = ChannelQueue("shell", self.channel_socket, self.log) + self._channel_queues["shell"] = self._shell_channel return self._shell_channel @property @@ -616,8 +655,8 @@ def iopub_channel(self): """Get the iopub channel object for this kernel.""" if self._iopub_channel is None: self.log.debug("creating iopub channel queue") - self._iopub_channel = ChannelQueue('iopub', self.channel_socket, self.log) - self._channel_queues['iopub'] = self._iopub_channel + self._iopub_channel = ChannelQueue("iopub", self.channel_socket, self.log) + self._channel_queues["iopub"] = self._iopub_channel return self._iopub_channel @property @@ -625,8 +664,8 @@ def stdin_channel(self): """Get the stdin channel object for this kernel.""" if self._stdin_channel is None: self.log.debug("creating stdin channel queue") - self._stdin_channel = ChannelQueue('stdin', self.channel_socket, self.log) - self._channel_queues['stdin'] = self._stdin_channel + self._stdin_channel = ChannelQueue("stdin", self.channel_socket, self.log) + self._channel_queues["stdin"] = self._stdin_channel return self._stdin_channel @property @@ -634,8 +673,8 @@ def hb_channel(self): """Get the hb channel object for this kernel.""" if self._hb_channel is None: self.log.debug("creating hb channel queue") - self._hb_channel = HBChannelQueue('hb', self.channel_socket, self.log) - self._channel_queues['hb'] = self._hb_channel + self._hb_channel = HBChannelQueue("hb", self.channel_socket, self.log) + self._channel_queues["hb"] = self._hb_channel return self._hb_channel @property @@ -643,8 +682,8 @@ def control_channel(self): """Get the control channel object for this kernel.""" if self._control_channel is None: self.log.debug("creating control channel queue") - self._control_channel = ChannelQueue('control', self.channel_socket, self.log) - self._channel_queues['control'] = self._control_channel + self._control_channel = ChannelQueue("control", self.channel_socket, self.log) + self._channel_queues["control"] = self._control_channel return self._control_channel def _route_responses(self): @@ -661,7 +700,7 @@ def _route_responses(self): if not raw_message: break response_message = json_decode(utf8(raw_message)) - channel = response_message['channel'] + channel = response_message["channel"] self._channel_queues[channel].put_nowait(response_message) except websocket.WebSocketConnectionClosedException: @@ -669,9 +708,9 @@ def _route_responses(self): except BaseException as be: if not self._channels_stopped: - self.log.warning('Unexpected exception encountered ({})'.format(be)) + self.log.warning("Unexpected exception encountered ({})".format(be)) - self.log.debug('Response router thread exiting...') + self.log.debug("Response router thread exiting...") KernelClientABC.register(GatewayKernelClient) diff --git a/jupyter_server/i18n/README.md b/jupyter_server/i18n/README.md index 8d1bb60335..17a475ce47 100644 --- a/jupyter_server/i18n/README.md +++ b/jupyter_server/i18n/README.md @@ -11,11 +11,11 @@ Howeverâ€Ļ ## How the language is selected ? 1. `jupyter notebook` command reads the `LANG` environment variable at startup, -(`xx_XX` or just `xx` form, where `xx` is the language code you're wanting to -run in). 
+ (`xx_XX` or just `xx` form, where `xx` is the language code you're wanting to + run in). Hint: if running Windows, you can set it in PowerShell with `${Env:LANG} = "xx_XX"`. - if running Ubuntu 14, you should set environment variable `LANGUAGE="xx_XX"`. +if running Ubuntu 14, you should set environment variable `LANGUAGE="xx_XX"`. 2. The preferred language for web pages in your browser settings (`xx`) is also used. At the moment, it has to be first in the list. @@ -24,8 +24,8 @@ Hint: if running Windows, you can set it in PowerShell with `${Env:LANG} = "xx_X ### Requirements -- *pybabel* (could be installed `pip install babel`) -- *po2json* (could be installed with `npm install -g po2json`) +- _pybabel_ (could be installed `pip install babel`) +- _po2json_ (could be installed with `npm install -g po2json`) **All i18n-related commands are done from the related directory :** @@ -35,12 +35,12 @@ Hint: if running Windows, you can set it in PowerShell with `${Env:LANG} = "xx_X The translatable material for notebook is split into 3 `.pot` files, as follows: -- *notebook/i18n/notebook.pot* - Console and startup messages, basically anything that is - produced by Python code. -- *notebook/i18n/nbui.pot* - User interface strings, as extracted from the Jinja2 templates - in *notebook/templates/\*.html* -- *noteook/i18n/nbjs.pot* - JavaScript strings and dialogs, which contain much of the visible - user interface for Jupyter notebook. +- _notebook/i18n/notebook.pot_ - Console and startup messages, basically anything that is + produced by Python code. +- _notebook/i18n/nbui.pot_ - User interface strings, as extracted from the Jinja2 templates + in _notebook/templates/\*.html_ +- _noteook/i18n/nbjs.pot_ - JavaScript strings and dialogs, which contain much of the visible + user interface for Jupyter notebook. To extract the messages from the source code whenever new material is added, use the `pybabel` command: @@ -56,10 +56,10 @@ After this is complete you have 3 `.pot` files that you can give to a translator ### Messages compilation After the source material has been translated, you should have 3 `.po` files with the same base names -as the `.pot` files above. Put them in `notebook/i18n/${LANG}/LC_MESSAGES`, where `${LANG}` is the language +as the `.pot` files above. Put them in `notebook/i18n/${LANG}/LC_MESSAGES`, where `${LANG}` is the language code for your desired language ( i.e. German = "de", Japanese = "ja", etc. ). -*notebook.po* and *nbui.po* need to be converted from `.po` to `.mo` format for +_notebook.po_ and _nbui.po_ need to be converted from `.po` to `.mo` format for use at runtime. ```shell @@ -67,20 +67,20 @@ pybabel compile -D notebook -f -l ${LANG} -i ${LANG}/LC_MESSAGES/notebook.po -o pybabel compile -D nbui -f -l ${LANG} -i ${LANG}/LC_MESSAGES/nbui.po -o ${LANG}/LC_MESSAGES/nbui.mo ``` -*nbjs.po* needs to be converted to JSON for use within the JavaScript code, with *po2json*, as follows: +_nbjs.po_ needs to be converted to JSON for use within the JavaScript code, with _po2json_, as follows: po2json -p -F -f jed1.x -d nbjs ${LANG}/LC_MESSAGES/nbjs.po ${LANG}/LC_MESSAGES/nbjs.json -When new languages get added, their language codes should be added to *notebook/i18n/nbjs.json* +When new languages get added, their language codes should be added to _notebook/i18n/nbjs.json_ under the `supported_languages` element. ### Tips for Jupyter developers The biggest "mistake" I found while doing i18n enablement was the habit of constructing UI messages -from English "piece parts". 
For example, code like: +from English "piece parts". For example, code like: ```javascript -var msg = "Enter a new " + type + "name:" +var msg = "Enter a new " + type + "name:"; ``` where `type` is either "file", "directory", or "notebook".... @@ -91,40 +91,42 @@ as follows: ```javascript var rename_msg = function (type) { - switch(type) { - case 'file': return _("Enter a new file name:"); - case 'directory': return _("Enter a new directory name:"); - case 'notebook': return _("Enter a new notebook name:"); - default: return _("Enter a new name:"); - } -} + switch (type) { + case "file": + return _("Enter a new file name:"); + case "directory": + return _("Enter a new directory name:"); + case "notebook": + return _("Enter a new notebook name:"); + default: + return _("Enter a new name:"); + } +}; ``` Also you need to remember that adding an "s" or "es" to an English word to -create the plural form doesn't translate well. Some languages have as many as 5 or 6 different +create the plural form doesn't translate well. Some languages have as many as 5 or 6 different plural forms for differing numbers, so using an API such as ngettext() is necessary in order to handle these cases properly. ### Known issues and future evolutions -1. Right now there are two different places where the desired language is set. At startup time, the Jupyter console's messages pay attention to the setting of the `${LANG}` environment variable -as set in the shell at startup time. Unfortunately, this is also the time where the Jinja2 -environment is set up, which means that the template stuff will always come from this setting. -We really want to be paying attention to the browser's settings for the stuff that happens in the -browser, so we need to be able to retrieve this information after the browser is started and somehow -communicate this back to Jinja2. So far, I haven't yet figured out how to do this, which means that if the ${LANG} at startup doesn't match the browser's settings, you could potentially get a mix -of languages in the UI ( never a good thing ). +1. Right now there are two different places where the desired language is set. At startup time, the Jupyter console's messages pay attention to the setting of the `${LANG}` environment variable + as set in the shell at startup time. Unfortunately, this is also the time where the Jinja2 + environment is set up, which means that the template stuff will always come from this setting. + We really want to be paying attention to the browser's settings for the stuff that happens in the + browser, so we need to be able to retrieve this information after the browser is started and somehow + communicate this back to Jinja2. So far, I haven't yet figured out how to do this, which means that if the ${LANG} at startup doesn't match the browser's settings, you could potentially get a mix + of languages in the UI ( never a good thing ). -2. We will need to decide if console messages should be translatable, and enable them if desired. -3. The keyboard shortcut editor was implemented after the i18n work was completed, so that portion -does not have translation support at this time. -4. Babel's documentation has instructions on how to integrate messages extraction -into your *setup.py* so that eventually we can just do: +2. We will need to decide if console messages should be translatable, and enable them if desired. +3. The keyboard shortcut editor was implemented after the i18n work was completed, so that portion + does not have translation support at this time. +4. 
Babel's documentation has instructions on how to integrate messages extraction + into your _setup.py_ so that eventually we can just do: - ./setup.py extract_messages - -I hope to get this working at some point in the near future. -5. The conversions from `.po` to `.mo` probably can and should be done using `setup.py install`. + ./setup.py extract_messages +I hope to get this working at some point in the near future. 5. The conversions from `.po` to `.mo` probably can and should be done using `setup.py install`. Any questions or comments please let me know @JCEmmons on github (emmo@us.ibm.com) diff --git a/jupyter_server/i18n/__init__.py b/jupyter_server/i18n/__init__.py index 63fde70f06..2ffa7ad392 100644 --- a/jupyter_server/i18n/__init__.py +++ b/jupyter_server/i18n/__init__.py @@ -1,11 +1,12 @@ """Server functions for loading translations """ -from collections import defaultdict import errno import io import json -from os.path import dirname, join as pjoin import re +from collections import defaultdict +from os.path import dirname +from os.path import join as pjoin I18N_DIR = dirname(__file__) # Cache structure: @@ -15,14 +16,18 @@ # ... # } # }} -TRANSLATIONS_CACHE = {'nbjs': {}} +TRANSLATIONS_CACHE = {"nbjs": {}} -_accept_lang_re = re.compile(r''' +_accept_lang_re = re.compile( + r""" (?P[a-zA-Z]{1,8}(-[a-zA-Z]{1,8})?) (\s*;\s*q\s*=\s* (?P[01](.\d+)?) -)?''', re.VERBOSE) +)?""", + re.VERBOSE, +) + def parse_accept_lang_header(accept_lang): """Parses the 'Accept-Language' HTTP header. @@ -31,15 +36,15 @@ def parse_accept_lang_header(accept_lang): (with the most preferred language last). """ by_q = defaultdict(list) - for part in accept_lang.split(','): + for part in accept_lang.split(","): m = _accept_lang_re.match(part.strip()) if not m: continue - lang, qvalue = m.group('lang', 'qvalue') + lang, qvalue = m.group("lang", "qvalue") # Browser header format is zh-CN, gettext uses zh_CN - lang = lang.replace('-', '_') + lang = lang.replace("-", "_") if qvalue is None: - qvalue = 1. + qvalue = 1.0 else: qvalue = float(qvalue) if qvalue == 0: @@ -51,11 +56,11 @@ def parse_accept_lang_header(accept_lang): res.extend(sorted(langs)) return res -def load(language, domain='nbjs'): + +def load(language, domain="nbjs"): """Load translations from an nbjs.json file""" try: - f = io.open(pjoin(I18N_DIR, language, 'LC_MESSAGES', 'nbjs.json'), - encoding='utf-8') + f = io.open(pjoin(I18N_DIR, language, "LC_MESSAGES", "nbjs.json"), encoding="utf-8") except IOError as e: if e.errno != errno.ENOENT: raise @@ -65,7 +70,8 @@ def load(language, domain='nbjs'): data = json.load(f) return data["locale_data"][domain] -def cached_load(language, domain='nbjs'): + +def cached_load(language, domain="nbjs"): """Load translations for one language, using in-memory cache if available""" domain_cache = TRANSLATIONS_CACHE[domain] try: @@ -75,7 +81,8 @@ def cached_load(language, domain='nbjs'): domain_cache[language] = data return data -def combine_translations(accept_language, domain='nbjs'): + +def combine_translations(accept_language, domain="nbjs"): """Combine translations for multiple accepted languages. Returns data re-packaged in jed1.x format. @@ -83,17 +90,12 @@ def combine_translations(accept_language, domain='nbjs'): lang_codes = parse_accept_lang_header(accept_language) combined = {} for language in lang_codes: - if language == 'en': + if language == "en": # en is default, all translations are in frontend. 
combined.clear() else: combined.update(cached_load(language, domain)) - combined[''] = {"domain":"nbjs"} + combined[""] = {"domain": "nbjs"} - return { - "domain": domain, - "locale_data": { - domain: combined - } - } + return {"domain": domain, "locale_data": {domain: combined}} diff --git a/jupyter_server/i18n/nbjs.json b/jupyter_server/i18n/nbjs.json index fd1a9701fa..a263a46a3e 100644 --- a/jupyter_server/i18n/nbjs.json +++ b/jupyter_server/i18n/nbjs.json @@ -1,13 +1,11 @@ -{ - "domain": "nbjs", - "supported_languages": [ - "zh-CN" - ], - "locale_data": { - "nbjs": { - "": { - "domain": "nbjs" - } - } - } -} +{ + "domain": "nbjs", + "supported_languages": ["zh-CN"], + "locale_data": { + "nbjs": { + "": { + "domain": "nbjs" + } + } + } +} diff --git a/jupyter_server/i18n/nbui.pot b/jupyter_server/i18n/nbui.pot index 9b91fd26ab..7769acffee 100644 --- a/jupyter_server/i18n/nbui.pot +++ b/jupyter_server/i18n/nbui.pot @@ -729,4 +729,3 @@ msgstr "" #: notebook/templates/tree.html:179 msgid "See 'IPython parallel' for installation details." msgstr "" - diff --git a/jupyter_server/i18n/notebook.pot b/jupyter_server/i18n/notebook.pot index 31a34d51e3..333b40d76c 100644 --- a/jupyter_server/i18n/notebook.pot +++ b/jupyter_server/i18n/notebook.pot @@ -440,4 +440,3 @@ msgstr "" #: jupyter_server/services/contents/manager.py:68 msgid "Untitled" msgstr "" - diff --git a/jupyter_server/i18n/zh_CN/LC_MESSAGES/nbui.po b/jupyter_server/i18n/zh_CN/LC_MESSAGES/nbui.po index 9bde659fd8..9baa2e4c66 100644 --- a/jupyter_server/i18n/zh_CN/LC_MESSAGES/nbui.po +++ b/jupyter_server/i18n/zh_CN/LC_MESSAGES/nbui.po @@ -729,4 +729,3 @@ msgstr "集įž¤æ ‡į­žįŽ°åœ¨į”ąIPythonåšļčĄŒæäž›." #: notebook/templates/tree.html:179 msgid "See 'IPython parallel' for installation details." msgstr "厉čŖ…įģ†čŠ‚æŸĨįœ‹ 'IPython parallel'." 
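Editor's note: the `i18n/__init__.py` hunks above reformat `parse_accept_lang_header`, which orders languages by their `q` values, least preferred first, so later entries win when translations are merged. A runnable sketch of the same parsing logic, kept close to the diff's regex:

```python
import re
from collections import defaultdict

# Browser-style "zh-CN" is normalized to gettext-style "zh_CN".
_accept_lang_re = re.compile(
    r"""
(?P<lang>[a-zA-Z]{1,8}(-[a-zA-Z]{1,8})?)
(\s*;\s*q\s*=\s*
(?P<qvalue>[01](.\d+)?)
)?""",
    re.VERBOSE,
)

def parse_accept_lang(accept_lang):
    by_q = defaultdict(list)
    for part in accept_lang.split(","):
        m = _accept_lang_re.match(part.strip())
        if not m:
            continue
        lang, qvalue = m.group("lang", "qvalue")
        q = float(qvalue) if qvalue is not None else 1.0
        if q == 0:
            continue  # q=0 means "not acceptable"
        by_q[q].append(lang.replace("-", "_"))
    res = []
    for q, langs in sorted(by_q.items()):
        res.extend(sorted(langs))
    return res

print(parse_accept_lang("zh-CN,zh;q=0.8,en;q=0.5"))  # ['en', 'zh', 'zh_CN']
```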
- diff --git a/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po b/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po index 5bcfefccfb..ee74a2097c 100644 --- a/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po +++ b/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po @@ -444,4 +444,3 @@ msgstr "启动notebooks 在æœŦåœ°čˇ¯åž„: %s" #: notebook/services/contents/manager.py:69 msgid "Untitled" msgstr "æœĒå‘Ŋ名" - diff --git a/jupyter_server/kernelspecs/handlers.py b/jupyter_server/kernelspecs/handlers.py index f53cdcd840..b940015934 100644 --- a/jupyter_server/kernelspecs/handlers.py +++ b/jupyter_server/kernelspecs/handlers.py @@ -1,13 +1,14 @@ from tornado import web + from ..base.handlers import JupyterHandler from ..services.kernelspecs.handlers import kernel_name_regex class KernelSpecResourceHandler(web.StaticFileHandler, JupyterHandler): - SUPPORTED_METHODS = ('GET', 'HEAD') + SUPPORTED_METHODS = ("GET", "HEAD") def initialize(self): - web.StaticFileHandler.initialize(self, path='') + web.StaticFileHandler.initialize(self, path="") @web.authenticated def get(self, kernel_name, path, include_body=True): @@ -15,8 +16,7 @@ def get(self, kernel_name, path, include_body=True): try: self.root = ksm.get_kernel_spec(kernel_name).resource_dir except KeyError as e: - raise web.HTTPError(404, u'Kernel spec %s not found' % - kernel_name) from e + raise web.HTTPError(404, u"Kernel spec %s not found" % kernel_name) from e self.log.debug("Serving kernel resource from: %s", self.root) return web.StaticFileHandler.get(self, path, include_body=include_body) @@ -24,7 +24,7 @@ def get(self, kernel_name, path, include_body=True): def head(self, kernel_name, path): return self.get(kernel_name, path, include_body=False) + default_handlers = [ (r"/kernelspecs/%s/(?P.*)" % kernel_name_regex, KernelSpecResourceHandler), ] - diff --git a/jupyter_server/log.py b/jupyter_server/log.py index 13638ac686..3fd63c70de 100644 --- a/jupyter_server/log.py +++ b/jupyter_server/log.py @@ -1,12 +1,13 @@ -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Copyright (c) Jupyter Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - +# ----------------------------------------------------------------------------- import json + from tornado.log import access_log + from .prometheus.log_functions import prometheus_log_method @@ -46,8 +47,8 @@ def log_request(handler): msg = "{status} {method} {uri} ({ip}) {request_time:.2f}ms" if status >= 400: # log bad referers - ns['referer'] = request.headers.get('Referer', 'None') - msg = msg + ' referer={referer}' + ns["referer"] = request.headers.get("Referer", "None") + msg = msg + " referer={referer}" if status >= 500 and status != 502: # log all headers if it caused an error log_method(json.dumps(dict(request.headers), indent=2)) diff --git a/jupyter_server/nbconvert/handlers.py b/jupyter_server/nbconvert/handlers.py index 85a48f9af6..7968e85514 100644 --- a/jupyter_server/nbconvert/handlers.py +++ b/jupyter_server/nbconvert/handlers.py @@ -1,24 +1,21 @@ """Tornado handlers for nbconvert.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
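Editor's note: the `log.py` hunk above keeps the incremental format-string trick, where the referer is appended to the access-log message only for error responses. A reduced, framework-free sketch (a plain `headers` dict replaces tornado's request object):

```python
def format_access_log(status, method, uri, ip, request_time_ms, headers):
    ns = {
        "status": status,
        "method": method,
        "uri": uri,
        "ip": ip,
        "request_time": request_time_ms,
    }
    msg = "{status} {method} {uri} ({ip}) {request_time:.2f}ms"
    if status >= 400:  # log bad referers, as in log_request()
        ns["referer"] = headers.get("Referer", "None")
        msg = msg + " referer={referer}"
    return msg.format(**ns)

print(format_access_log(404, "GET", "/missing", "127.0.0.1", 3.2, {}))
# 404 GET /missing (127.0.0.1) 3.20ms referer=None
```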
- import io import os import zipfile +from ipython_genutils import text +from ipython_genutils.py3compat import cast_bytes +from nbformat import from_dict from tornado import web from tornado.log import app_log -from ..base.handlers import ( - JupyterHandler, FilesRedirectHandler, - path_regex, -) +from ..base.handlers import FilesRedirectHandler +from ..base.handlers import JupyterHandler +from ..base.handlers import path_regex from jupyter_server.utils import ensure_async -from nbformat import from_dict -from ipython_genutils.py3compat import cast_bytes -from ipython_genutils import text def find_resource_files(output_files_dir): files = [] @@ -26,6 +23,7 @@ def find_resource_files(output_files_dir): files.extend([os.path.join(dirpath, f) for f in filenames]) return files + def respond_zip(handler, name, output, resources): """Zip up the output and resource files and respond with the zip file. @@ -33,21 +31,21 @@ def respond_zip(handler, name, output, resources): files, in which case we serve the plain output file. """ # Check if we have resource files we need to zip - output_files = resources.get('outputs', None) + output_files = resources.get("outputs", None) if not output_files: return False # Headers - zip_filename = os.path.splitext(name)[0] + '.zip' + zip_filename = os.path.splitext(name)[0] + ".zip" handler.set_attachment_header(zip_filename) - handler.set_header('Content-Type', 'application/zip') - handler.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') + handler.set_header("Content-Type", "application/zip") + handler.set_header("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0") # Prepare the zip file buffer = io.BytesIO() - zipf = zipfile.ZipFile(buffer, mode='w', compression=zipfile.ZIP_DEFLATED) - output_filename = os.path.splitext(name)[0] + resources['output_extension'] - zipf.writestr(output_filename, cast_bytes(output, 'utf-8')) + zipf = zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED) + output_filename = os.path.splitext(name)[0] + resources["output_extension"] + zipf.writestr(output_filename, cast_bytes(output, "utf-8")) for filename, data in output_files.items(): zipf.writestr(os.path.basename(filename), data) zipf.close() @@ -55,6 +53,7 @@ def respond_zip(handler, name, output, resources): handler.finish(buffer.getvalue()) return True + def get_exporter(format, **kwargs): """get an exporter, raising appropriate errors""" # if this fails, will raise 500 @@ -78,52 +77,46 @@ def get_exporter(format, **kwargs): class NbconvertFileHandler(JupyterHandler): - SUPPORTED_METHODS = ('GET',) + SUPPORTED_METHODS = ("GET",) @web.authenticated async def get(self, format, path): self.check_xsrf_cookie() exporter = get_exporter(format, config=self.config, log=self.log) - path = path.strip('/') + path = path.strip("/") # If the notebook relates to a real file (default contents manager), # give its path to nbconvert. 
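Editor's note: `respond_zip` above builds the archive entirely in memory before finishing the response, so no temp files touch disk. A minimal sketch of that construction with invented file names and contents:

```python
import io
import zipfile

# Write the converted notebook plus its resource files into a BytesIO
# buffer and hand the raw bytes to the response, as respond_zip() does.
buffer = io.BytesIO()
zipf = zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED)
zipf.writestr("notebook.html", b"<html>converted output</html>")
zipf.writestr("output_1_0.png", b"\x89PNG...")  # one entry per resource file
zipf.close()
payload = buffer.getvalue()  # handler.finish(payload) in the real handler
print(len(payload), "bytes")
```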
- if hasattr(self.contents_manager, '_get_os_path'): + if hasattr(self.contents_manager, "_get_os_path"): os_path = self.contents_manager._get_os_path(path) ext_resources_dir, basename = os.path.split(os_path) else: ext_resources_dir = None model = await ensure_async(self.contents_manager.get(path=path)) - name = model['name'] - if model['type'] != 'notebook': + name = model["name"] + if model["type"] != "notebook": # not a notebook, redirect to files return FilesRedirectHandler.redirect_to_files(self, path) - nb = model['content'] + nb = model["content"] - self.set_header('Last-Modified', model['last_modified']) + self.set_header("Last-Modified", model["last_modified"]) # create resources dictionary - mod_date = model['last_modified'].strftime(text.date_format) + mod_date = model["last_modified"].strftime(text.date_format) nb_title = os.path.splitext(name)[0] resource_dict = { - "metadata": { - "name": nb_title, - "modified_date": mod_date - }, - "config_dir": self.application.settings['config_dir'] + "metadata": {"name": nb_title, "modified_date": mod_date}, + "config_dir": self.application.settings["config_dir"], } if ext_resources_dir: - resource_dict['metadata']['path'] = ext_resources_dir + resource_dict["metadata"]["path"] = ext_resources_dir try: - output, resources = exporter.from_notebook_node( - nb, - resources=resource_dict - ) + output, resources = exporter.from_notebook_node(nb, resources=resource_dict) except Exception as e: self.log.exception("nbconvert failed: %s", e) raise web.HTTPError(500, "nbconvert failed: %s" % e) from e @@ -132,36 +125,40 @@ async def get(self, format, path): return # Force download if requested - if self.get_argument('download', 'false').lower() == 'true': - filename = os.path.splitext(name)[0] + resources['output_extension'] + if self.get_argument("download", "false").lower() == "true": + filename = os.path.splitext(name)[0] + resources["output_extension"] self.set_attachment_header(filename) # MIME type if exporter.output_mimetype: - self.set_header('Content-Type', - '%s; charset=utf-8' % exporter.output_mimetype) + self.set_header("Content-Type", "%s; charset=utf-8" % exporter.output_mimetype) - self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') + self.set_header("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0") self.finish(output) class NbconvertPostHandler(JupyterHandler): - SUPPORTED_METHODS = ('POST',) + SUPPORTED_METHODS = ("POST",) @web.authenticated def post(self, format): exporter = get_exporter(format, config=self.config) model = self.get_json_body() - name = model.get('name', 'notebook.ipynb') - nbnode = from_dict(model['content']) + name = model.get("name", "notebook.ipynb") + nbnode = from_dict(model["content"]) try: - output, resources = exporter.from_notebook_node(nbnode, resources={ - "metadata": {"name": name[:name.rfind('.')],}, - "config_dir": self.application.settings['config_dir'], - }) + output, resources = exporter.from_notebook_node( + nbnode, + resources={ + "metadata": { + "name": name[: name.rfind(".")], + }, + "config_dir": self.application.settings["config_dir"], + }, + ) except Exception as e: raise web.HTTPError(500, "nbconvert failed: %s" % e) from e @@ -170,21 +167,19 @@ def post(self, format): # MIME type if exporter.output_mimetype: - self.set_header('Content-Type', - '%s; charset=utf-8' % exporter.output_mimetype) + self.set_header("Content-Type", "%s; charset=utf-8" % exporter.output_mimetype) self.finish(output) 
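Editor's note: for readers unfamiliar with what `NbconvertFileHandler`/`NbconvertPostHandler` delegate to, here is a hedged end-to-end use of nbconvert's exporter API outside the handler. It assumes `nbconvert` and `nbformat` are installed; the exporter choice and metadata are illustrative:

```python
import nbformat
from nbconvert import HTMLExporter

# Build a tiny notebook node, run it through an exporter, and inspect
# the resources dict the handlers use for filenames and MIME handling.
nb = nbformat.v4.new_notebook()
nb.cells.append(nbformat.v4.new_markdown_cell("# Title"))

exporter = HTMLExporter()
output, resources = exporter.from_notebook_node(
    nb, resources={"metadata": {"name": "notebook"}}
)
print(resources["output_extension"])  # ".html"
```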
-#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # URL to handler mappings -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- _format_regex = r"(?P\w+)" default_handlers = [ (r"/nbconvert/%s" % _format_regex, NbconvertPostHandler), - (r"/nbconvert/%s%s" % (_format_regex, path_regex), - NbconvertFileHandler), + (r"/nbconvert/%s%s" % (_format_regex, path_regex), NbconvertFileHandler), ] diff --git a/jupyter_server/prometheus/log_functions.py b/jupyter_server/prometheus/log_functions.py index 6f13fc6eb8..1f36ade3e3 100644 --- a/jupyter_server/prometheus/log_functions.py +++ b/jupyter_server/prometheus/log_functions.py @@ -19,6 +19,6 @@ def prometheus_log_method(handler): """ HTTP_REQUEST_DURATION_SECONDS.labels( method=handler.request.method, - handler='{}.{}'.format(handler.__class__.__module__, type(handler).__name__), - status_code=handler.get_status() + handler="{}.{}".format(handler.__class__.__module__, type(handler).__name__), + status_code=handler.get_status(), ).observe(handler.request.request_time()) diff --git a/jupyter_server/prometheus/metrics.py b/jupyter_server/prometheus/metrics.py index 3ebe4d074d..a7f200b7ca 100644 --- a/jupyter_server/prometheus/metrics.py +++ b/jupyter_server/prometheus/metrics.py @@ -10,25 +10,29 @@ # Try to de-duplicate by using the ones in Notebook if available. # See https://github.com/jupyter/jupyter_server/issues/209 # pylint: disable=unused-import - from notebook.prometheus.metrics import HTTP_REQUEST_DURATION_SECONDS,TERMINAL_CURRENTLY_RUNNING_TOTAL, KERNEL_CURRENTLY_RUNNING_TOTAL + from notebook.prometheus.metrics import ( + HTTP_REQUEST_DURATION_SECONDS, + TERMINAL_CURRENTLY_RUNNING_TOTAL, + KERNEL_CURRENTLY_RUNNING_TOTAL, + ) except ImportError: from prometheus_client import Histogram, Gauge HTTP_REQUEST_DURATION_SECONDS = Histogram( - 'http_request_duration_seconds', - 'duration in seconds for all HTTP requests', - ['method', 'handler', 'status_code'], + "http_request_duration_seconds", + "duration in seconds for all HTTP requests", + ["method", "handler", "status_code"], ) TERMINAL_CURRENTLY_RUNNING_TOTAL = Gauge( - 'terminal_currently_running_total', - 'counter for how many terminals are running', + "terminal_currently_running_total", + "counter for how many terminals are running", ) KERNEL_CURRENTLY_RUNNING_TOTAL = Gauge( - 'kernel_currently_running_total', - 'counter for how many kernels are running labeled by type', - ['type'] + "kernel_currently_running_total", + "counter for how many kernels are running labeled by type", + ["type"], ) diff --git a/jupyter_server/pytest_plugin.py b/jupyter_server/pytest_plugin.py index 00a7c8fc86..73a7ce8a50 100644 --- a/jupyter_server/pytest_plugin.py +++ b/jupyter_server/pytest_plugin.py @@ -1,26 +1,25 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
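Editor's note: the metrics hunk above only re-quotes the declarations; for context, this is how `prometheus_log_method` feeds the histogram via `prometheus_client`. The label values are examples:

```python
from prometheus_client import Histogram

# Same declaration as metrics.py; one observation is recorded per request,
# labeled by method, handler class, and status code.
HTTP_REQUEST_DURATION_SECONDS = Histogram(
    "http_request_duration_seconds",
    "duration in seconds for all HTTP requests",
    ["method", "handler", "status_code"],
)

HTTP_REQUEST_DURATION_SECONDS.labels(
    method="GET",
    handler="jupyter_server.base.handlers.MainHandler",  # example label value
    status_code=200,
).observe(0.042)  # request_time() in seconds
```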
- -import os -import sys import json -import pytest +import os import shutil +import sys import urllib.parse - from binascii import hexlify -import tornado -from tornado.escape import url_escape import jupyter_core.paths import nbformat +import pytest +import tornado +from tornado.escape import url_escape from traitlets.config import Config from jupyter_server.extension import serverextension from jupyter_server.serverapp import ServerApp -from jupyter_server.utils import url_path_join, run_sync from jupyter_server.services.contents.filemanager import FileContentsManager from jupyter_server.services.contents.largefilemanager import LargeFileManager +from jupyter_server.utils import run_sync +from jupyter_server.utils import url_path_join # List of dependencies needed for this plugin. @@ -33,12 +32,14 @@ import asyncio + if os.name == "nt" and sys.version_info >= (3, 7): asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) # ============ Move to Jupyter Core ============= + def mkdir(tmp_path, *parts): path = tmp_path.joinpath(*parts) if not path.exists(): @@ -114,18 +115,15 @@ def jp_environ( monkeypatch.setenv("JUPYTER_CONFIG_DIR", str(jp_config_dir)) monkeypatch.setenv("JUPYTER_DATA_DIR", str(jp_data_dir)) monkeypatch.setenv("JUPYTER_RUNTIME_DIR", str(jp_runtime_dir)) - monkeypatch.setattr( - jupyter_core.paths, "SYSTEM_JUPYTER_PATH", [str(jp_system_jupyter_path)] - ) + monkeypatch.setattr(jupyter_core.paths, "SYSTEM_JUPYTER_PATH", [str(jp_system_jupyter_path)]) monkeypatch.setattr(jupyter_core.paths, "ENV_JUPYTER_PATH", [str(jp_env_jupyter_path)]) - monkeypatch.setattr( - jupyter_core.paths, "SYSTEM_CONFIG_PATH", [str(jp_system_config_path)] - ) + monkeypatch.setattr(jupyter_core.paths, "SYSTEM_CONFIG_PATH", [str(jp_system_config_path)]) monkeypatch.setattr(jupyter_core.paths, "ENV_CONFIG_PATH", [str(jp_env_config_path)]) # ================= End: Move to Jupyter core ================ + @pytest.fixture def jp_server_config(): """Allows tests to setup their specific configuration values. """ @@ -168,21 +166,21 @@ def jp_nbconvert_templates(jp_data_dir): # Get path to nbconvert template directory *before* # monkeypatching the paths env variable via the jp_environ fixture. - possible_paths = jupyter_core.paths.jupyter_path('nbconvert', 'templates') + possible_paths = jupyter_core.paths.jupyter_path("nbconvert", "templates") nbconvert_path = None for path in possible_paths: if os.path.exists(path): nbconvert_path = path break - nbconvert_target = jp_data_dir / 'nbconvert' / 'templates' + nbconvert_target = jp_data_dir / "nbconvert" / "templates" # copy nbconvert templates to new tmp data_dir. if nbconvert_path: shutil.copytree(nbconvert_path, str(nbconvert_target)) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def jp_configurable_serverapp( jp_nbconvert_templates, # this fixture must preceed jp_environ jp_environ, @@ -225,7 +223,7 @@ def _configurable_serverapp( token = hexlify(os.urandom(4)).decode("ascii") app = ServerApp.instance( # Set the log level to debug for testing purposes - log_level='DEBUG', + log_level="DEBUG", port=http_port, port_retries=0, open_browser=False, @@ -264,10 +262,12 @@ def jp_ensure_app_fixture(request): """ app_option = request.config.getoption("app_fixture") if app_option not in ["app", "jp_web_app"]: - raise Exception("jp_serverapp requires the `app-fixture` option " - "to be set to 'jp_web_app`. 
Try rerunning the "
-                        "current tests with the option `--app-fixture "
-                        "jp_web_app`.")
+        raise Exception(
+            "jp_serverapp requires the `app-fixture` option "
+            "to be set to `jp_web_app`. Try rerunning the "
+            "current tests with the option `--app-fixture "
+            "jp_web_app`."
+        )
     elif app_option == "app":
         # Manually set the app_fixture to `jp_web_app` if it's
         # not set already.
@@ -275,12 +275,7 @@ def jp_ensure_app_fixture(request):


 @pytest.fixture(scope="function")
-def jp_serverapp(
-    jp_ensure_app_fixture,
-    jp_server_config,
-    jp_argv,
-    jp_configurable_serverapp
-):
+def jp_serverapp(jp_ensure_app_fixture, jp_server_config, jp_argv, jp_configurable_serverapp):
     """Starts a Jupyter Server instance based on the established configuration values."""
     app = jp_configurable_serverapp(config=jp_server_config, argv=jp_argv)
     yield app
@@ -320,6 +315,7 @@ async def my_test(jp_fetch):
         response = await jp_fetch("api", "spec.yaml")
         ...
     """
+
     def client_fetch(*parts, headers={}, params={}, **kwargs):
         # Handle URL strings
         path_url = url_escape(url_path_join(*parts), plus=False)
@@ -329,9 +325,8 @@ def client_fetch(*parts, headers={}, params={}, **kwargs):
         # Add auth keys to header
         headers.update(jp_auth_header)
         # Make request.
-        return http_server_client.fetch(
-            url, headers=headers, request_timeout=20, **kwargs
-        )
+        return http_server_client.fetch(url, headers=headers, request_timeout=20, **kwargs)
+
     return client_fetch


@@ -362,45 +357,42 @@ async def my_test(jp_fetch, jp_ws_fetch):
         )
         ...
     """
+
     def client_fetch(*parts, headers={}, params={}, **kwargs):
         # Handle URL strings
         path_url = url_escape(url_path_join(*parts), plus=False)
         base_path_url = url_path_join(jp_base_url, path_url)
-        urlparts = urllib.parse.urlparse('ws://localhost:{}'.format(jp_http_port))
-        urlparts = urlparts._replace(
-            path=base_path_url,
-            query=urllib.parse.urlencode(params)
-        )
+        urlparts = urllib.parse.urlparse("ws://localhost:{}".format(jp_http_port))
+        urlparts = urlparts._replace(path=base_path_url, query=urllib.parse.urlencode(params))
         url = urlparts.geturl()
         # Add auth keys to header
         headers.update(jp_auth_header)
         # Make request.
- req = tornado.httpclient.HTTPRequest( - url, - headers=jp_auth_header, - connect_timeout=120 - ) + req = tornado.httpclient.HTTPRequest(url, headers=jp_auth_header, connect_timeout=120) return tornado.websocket.websocket_connect(req) + return client_fetch some_resource = u"The very model of a modern major general" sample_kernel_json = { - 'argv':['cat', '{connection_file}'], - 'display_name': 'Test kernel', + "argv": ["cat", "{connection_file}"], + "display_name": "Test kernel", } + + @pytest.fixture def jp_kernelspecs(jp_data_dir): """Configures some sample kernelspecs in the Jupyter data directory.""" - spec_names = ['sample', 'sample 2'] + spec_names = ["sample", "sample 2"] for name in spec_names: - sample_kernel_dir = jp_data_dir.joinpath('kernels', name) + sample_kernel_dir = jp_data_dir.joinpath("kernels", name) sample_kernel_dir.mkdir(parents=True) # Create kernel json file - sample_kernel_file = sample_kernel_dir.joinpath('kernel.json') + sample_kernel_file = sample_kernel_dir.joinpath("kernel.json") sample_kernel_file.write_text(json.dumps(sample_kernel_json)) # Create resources text - sample_kernel_resources = sample_kernel_dir.joinpath('resource.txt') + sample_kernel_resources = sample_kernel_dir.joinpath("resource.txt") sample_kernel_resources.write_text(some_resource) @@ -419,10 +411,11 @@ def jp_large_contents_manager(tmp_path): @pytest.fixture def jp_create_notebook(jp_root_dir): """Creates a notebook in the test's home directory.""" + def inner(nbpath): nbpath = jp_root_dir.joinpath(nbpath) # Check that the notebook has the correct file extension. - if nbpath.suffix != '.ipynb': + if nbpath.suffix != ".ipynb": raise Exception("File extension for notebook must be .ipynb") # If the notebook path has a parent directory, make sure it's created. parent = nbpath.parent @@ -431,6 +424,7 @@ def inner(nbpath): nb = nbformat.v4.new_notebook() nbtext = nbformat.writes(nb, version=4) nbpath.write_text(nbtext) + return inner diff --git a/jupyter_server/serverapp.py b/jupyter_server/serverapp.py index 33fc4256ed..b45030c600 100644 --- a/jupyter_server/serverapp.py +++ b/jupyter_server/serverapp.py @@ -1,21 +1,21 @@ # coding: utf-8 """A tornado based Jupyter server.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- import binascii import datetime import errno import gettext import hashlib import hmac +import inspect import io import ipaddress import json import logging import mimetypes import os +import pathlib import random import re import select @@ -25,12 +25,10 @@ import sys import threading import time -import webbrowser import urllib -import inspect -import pathlib - +import webbrowser from base64 import encodebytes + try: import resource except ImportError: @@ -48,11 +46,10 @@ try: import tornado + assert tornado.version_info >= MIN_TORNADO except (ImportError, AttributeError, AssertionError) as e: # pragma: no cover - raise ImportError( - _i18n("The Jupyter Server requires tornado >=%s.%s.%s") % MIN_TORNADO - ) from e + raise ImportError(_i18n("The Jupyter Server requires tornado >=%s.%s.%s") % MIN_TORNADO) from e from tornado import httpserver from tornado import ioloop @@ -60,7 +57,7 @@ from tornado.httputil import url_concat from tornado.log import LogFormatter, app_log, access_log, gen_log -if not sys.platform.startswith('win'): +if not sys.platform.startswith("win"): from tornado.netutil import bind_unix_socket from jupyter_server import ( @@ -72,13 +69,24 @@ from jupyter_server.base.handlers import MainHandler, RedirectWithParams, Template404 from jupyter_server.log import log_request -from jupyter_server.services.kernels.kernelmanager import MappingKernelManager, AsyncMappingKernelManager +from jupyter_server.services.kernels.kernelmanager import ( + MappingKernelManager, + AsyncMappingKernelManager, +) from jupyter_server.services.config import ConfigManager from jupyter_server.services.contents.manager import AsyncContentsManager, ContentsManager -from jupyter_server.services.contents.filemanager import AsyncFileContentsManager, FileContentsManager +from jupyter_server.services.contents.filemanager import ( + AsyncFileContentsManager, + FileContentsManager, +) from jupyter_server.services.contents.largefilemanager import LargeFileManager from jupyter_server.services.sessions.sessionmanager import SessionManager -from jupyter_server.gateway.managers import GatewayMappingKernelManager, GatewayKernelSpecManager, GatewaySessionManager, GatewayClient +from jupyter_server.gateway.managers import ( + GatewayMappingKernelManager, + GatewayKernelSpecManager, + GatewaySessionManager, + GatewayClient, +) from jupyter_server.auth.login import LoginHandler from jupyter_server.auth.logout import LogoutHandler @@ -87,7 +95,9 @@ from traitlets.config import Config from traitlets.config.application import catch_config_error, boolean_flag from jupyter_core.application import ( - JupyterApp, base_flags, base_aliases, + JupyterApp, + base_flags, + base_aliases, ) from jupyter_core.paths import jupyter_config_path from jupyter_client import KernelManager @@ -95,8 +105,20 @@ from jupyter_client.session import Session from nbformat.sign import NotebookNotary from traitlets import ( - Any, Dict, Unicode, Integer, List, Bool, Bytes, Instance, - TraitError, Type, Float, observe, default, validate + Any, + Dict, + Unicode, + Integer, + List, + Bool, + Bytes, + Instance, + TraitError, + Type, + Float, + observe, + default, + validate, ) from jupyter_core.paths import jupyter_runtime_dir from jupyter_server._sysinfo import get_sys_info @@ -109,7 +131,7 @@ pathname2url, unix_socket_in_use, urlencode_unix_socket_path, - fetch + fetch, ) from jupyter_server.extension.serverextension import ServerExtensionApp @@ -120,13 +142,14 @@ # Tolerate missing terminado package. 
try: from jupyter_server.terminal import TerminalManager + terminado_available = True except ImportError: terminado_available = False -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Module globals -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- _examples = """ jupyter server # start the server @@ -136,29 +159,29 @@ JUPYTER_SERVICE_HANDLERS = dict( auth=None, - api=['jupyter_server.services.api.handlers'], - config=['jupyter_server.services.config.handlers'], - contents=['jupyter_server.services.contents.handlers'], - files=['jupyter_server.files.handlers'], - kernels=['jupyter_server.services.kernels.handlers'], + api=["jupyter_server.services.api.handlers"], + config=["jupyter_server.services.config.handlers"], + contents=["jupyter_server.services.contents.handlers"], + files=["jupyter_server.files.handlers"], + kernels=["jupyter_server.services.kernels.handlers"], kernelspecs=[ - 'jupyter_server.kernelspecs.handlers', - 'jupyter_server.services.kernelspecs.handlers'], - nbconvert=[ - 'jupyter_server.nbconvert.handlers', - 'jupyter_server.services.nbconvert.handlers'], - security=['jupyter_server.services.security.handlers'], - sessions=['jupyter_server.services.sessions.handlers'], - shutdown=['jupyter_server.services.shutdown'], - view=['jupyter_server.view.handlers'] + "jupyter_server.kernelspecs.handlers", + "jupyter_server.services.kernelspecs.handlers", + ], + nbconvert=["jupyter_server.nbconvert.handlers", "jupyter_server.services.nbconvert.handlers"], + security=["jupyter_server.services.security.handlers"], + sessions=["jupyter_server.services.sessions.handlers"], + shutdown=["jupyter_server.services.shutdown"], + view=["jupyter_server.view.handlers"], ) # Added for backwards compatibility from classic notebook server. DEFAULT_SERVER_PORT = DEFAULT_JUPYTER_SERVER_PORT -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Helper functions -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + def random_ports(port, n): """Generate a list of n random ports near the given port. 
@@ -168,39 +191,72 @@ def random_ports(port, n): """ for i in range(min(5, n)): yield port + i - for i in range(n-5): - yield max(1, port + random.randint(-2*n, 2*n)) + for i in range(n - 5): + yield max(1, port + random.randint(-2 * n, 2 * n)) + def load_handlers(name): """Load the (URL pattern, handler) tuples for each component.""" - mod = __import__(name, fromlist=['default_handlers']) + mod = __import__(name, fromlist=["default_handlers"]) return mod.default_handlers -#----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- # The Tornado web application -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- -class ServerWebApplication(web.Application): - def __init__(self, jupyter_app, default_services, kernel_manager, contents_manager, - session_manager, kernel_spec_manager, - config_manager, extra_services, log, - base_url, default_url, settings_overrides, jinja_env_options): +class ServerWebApplication(web.Application): + def __init__( + self, + jupyter_app, + default_services, + kernel_manager, + contents_manager, + session_manager, + kernel_spec_manager, + config_manager, + extra_services, + log, + base_url, + default_url, + settings_overrides, + jinja_env_options, + ): settings = self.init_settings( - jupyter_app, kernel_manager, contents_manager, - session_manager, kernel_spec_manager, config_manager, - extra_services, log, base_url, - default_url, settings_overrides, jinja_env_options) + jupyter_app, + kernel_manager, + contents_manager, + session_manager, + kernel_spec_manager, + config_manager, + extra_services, + log, + base_url, + default_url, + settings_overrides, + jinja_env_options, + ) handlers = self.init_handlers(default_services, settings) super(ServerWebApplication, self).__init__(handlers, **settings) - def init_settings(self, jupyter_app, kernel_manager, contents_manager, - session_manager, kernel_spec_manager, - config_manager, extra_services, - log, base_url, default_url, settings_overrides, - jinja_env_options=None): + def init_settings( + self, + jupyter_app, + kernel_manager, + contents_manager, + session_manager, + kernel_spec_manager, + config_manager, + extra_services, + log, + base_url, + default_url, + settings_overrides, + jinja_env_options=None, + ): _template_path = settings_overrides.get( "template_path", @@ -213,20 +269,24 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager, jenv_opt = {"autoescape": True} jenv_opt.update(jinja_env_options if jinja_env_options else {}) - env = Environment(loader=FileSystemLoader(template_path), extensions=['jinja2.ext.i18n'], **jenv_opt) + env = Environment( + loader=FileSystemLoader(template_path), extensions=["jinja2.ext.i18n"], **jenv_opt + ) sys_info = get_sys_info() # If the user is running the server in a git directory, make the assumption # that this is a dev install and suggest to the developer `npm run build:watch`. 
- base_dir = os.path.realpath(os.path.join(__file__, '..', '..')) - dev_mode = os.path.exists(os.path.join(base_dir, '.git')) + base_dir = os.path.realpath(os.path.join(__file__, "..", "..")) + dev_mode = os.path.exists(os.path.join(base_dir, ".git")) - nbui = gettext.translation('nbui', localedir=os.path.join(base_dir, 'jupyter_server/i18n'), fallback=True) + nbui = gettext.translation( + "nbui", localedir=os.path.join(base_dir, "jupyter_server/i18n"), fallback=True + ) env.install_gettext_translations(nbui, newstyle=False) - if sys_info['commit_source'] == 'repository': + if sys_info["commit_source"] == "repository": # don't cache (rely on 304) when working from master - version_hash = '' + version_hash = "" else: # reset the cache on server restart version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S") @@ -237,7 +297,7 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager, home = os.path.expanduser("~") if root_dir.startswith(home + os.path.sep): # collapse $HOME to ~ - root_dir = '~' + root_dir[len(home):] + root_dir = "~" + root_dir[len(home) :] settings = dict( # basics @@ -247,22 +307,20 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager, template_path=template_path, static_path=jupyter_app.static_file_path, static_custom_path=jupyter_app.static_custom_path, - static_handler_class = FileFindHandler, - static_url_prefix = url_path_join(base_url, '/static/'), - static_handler_args = { + static_handler_class=FileFindHandler, + static_url_prefix=url_path_join(base_url, "/static/"), + static_handler_args={ # don't cache custom.js - 'no_cache_paths': [url_path_join(base_url, 'static', 'custom')], + "no_cache_paths": [url_path_join(base_url, "static", "custom")], }, version_hash=version_hash, - # rate limits iopub_msg_rate_limit=jupyter_app.iopub_msg_rate_limit, iopub_data_rate_limit=jupyter_app.iopub_data_rate_limit, rate_limit_window=jupyter_app.rate_limit_window, - # authentication cookie_secret=jupyter_app.cookie_secret, - login_url=url_path_join(base_url, '/login'), + login_url=url_path_join(base_url, "/login"), login_handler_class=jupyter_app.login_handler_class, logout_handler_class=jupyter_app.logout_handler_class, password=jupyter_app.password, @@ -271,17 +329,14 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager, allow_remote_access=jupyter_app.allow_remote_access, local_hostnames=jupyter_app.local_hostnames, authenticate_prometheus=jupyter_app.authenticate_prometheus, - # managers kernel_manager=kernel_manager, contents_manager=contents_manager, session_manager=session_manager, kernel_spec_manager=kernel_spec_manager, config_manager=config_manager, - # handlers extra_services=extra_services, - # Jupyter stuff started=now, # place for extensions to register activity @@ -296,7 +351,7 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager, server_root_dir=root_dir, jinja2_env=env, terminals_available=terminado_available and jupyter_app.terminals_enabled, - serverapp=jupyter_app + serverapp=jupyter_app, ) # allow custom overrides for the tornado web app. @@ -308,13 +363,13 @@ def init_handlers(self, default_services, settings): # Order matters. The first handler to match the URL will handle the request. handlers = [] # load extra services specified by users before default handlers - for service in settings['extra_services']: + for service in settings["extra_services"]: handlers.extend(load_handlers(service)) # Add auth services. 
- if 'auth' in default_services: - handlers.extend([(r"/login", settings['login_handler_class'])]) - handlers.extend([(r"/logout", settings['logout_handler_class'])]) + if "auth" in default_services: + handlers.extend([(r"/login", settings["login_handler_class"])]) + handlers.extend([(r"/logout", settings["logout_handler_class"])]) # Load default services. Raise exception if service not # found in JUPYTER_SERVICE_HANLDERS. @@ -325,19 +380,21 @@ def init_handlers(self, default_services, settings): for loc in locations: handlers.extend(load_handlers(loc)) else: - raise Exception("{} is not recognized as a jupyter_server " - "service. If this is a custom service, " - "try adding it to the " - "`extra_services` list.".format(service)) + raise Exception( + "{} is not recognized as a jupyter_server " + "service. If this is a custom service, " + "try adding it to the " + "`extra_services` list.".format(service) + ) # Add extra handlers from contents manager. - handlers.extend(settings['contents_manager'].get_extra_handlers()) + handlers.extend(settings["contents_manager"].get_extra_handlers()) # If gateway mode is enabled, replace appropriate handlers to perform redirection if GatewayClient.instance().gateway_enabled: # for each handler required for gateway, locate its pattern # in the current list and replace that entry... - gateway_handlers = load_handlers('jupyter_server.gateway.handlers') + gateway_handlers = load_handlers("jupyter_server.gateway.handlers") for i, gwh in enumerate(gateway_handlers): for j, h in enumerate(handlers): if gwh[0] == h[0]: @@ -345,28 +402,31 @@ def init_handlers(self, default_services, settings): break # register base handlers last - handlers.extend(load_handlers('jupyter_server.base.handlers')) + handlers.extend(load_handlers("jupyter_server.base.handlers")) - if settings['default_url'] != settings['base_url']: + if settings["default_url"] != settings["base_url"]: # set the URL that will be redirected from `/` handlers.append( - (r'/?', RedirectWithParams, { - 'url' : settings['default_url'], - 'permanent': False, # want 302, not 301 - }) + ( + r"/?", + RedirectWithParams, + { + "url": settings["default_url"], + "permanent": False, # want 302, not 301 + }, + ) ) else: - handlers.append( - (r"/", MainHandler)) + handlers.append((r"/", MainHandler)) # prepend base_url onto the patterns that we match new_handlers = [] for handler in handlers: - pattern = url_path_join(settings['base_url'], handler[0]) + pattern = url_path_join(settings["base_url"], handler[0]) new_handler = tuple([pattern] + list(handler[1:])) new_handlers.append(new_handler) # add 404 on the end, which will catch everything that falls through - new_handlers.append((r'(.*)', Template404)) + new_handlers.append((r"(.*)", Template404)) return new_handlers def last_activity(self): @@ -376,18 +436,18 @@ def last_activity(self): activity. 
""" sources = [ - self.settings['started'], - self.settings['kernel_manager'].last_kernel_activity, + self.settings["started"], + self.settings["kernel_manager"].last_kernel_activity, ] try: - sources.append(self.settings['api_last_activity']) + sources.append(self.settings["api_last_activity"]) except KeyError: pass try: - sources.append(self.settings['terminal_last_activity']) + sources.append(self.settings["terminal_last_activity"]) except KeyError: pass - sources.extend(self.settings['last_activity_times'].values()) + sources.extend(self.settings["last_activity_times"].values()) return max(sources) @@ -401,10 +461,11 @@ class JupyterPasswordApp(JupyterApp): description = __doc__ def _config_file_default(self): - return os.path.join(self.config_dir, 'jupyter_server_config.json') + return os.path.join(self.config_dir, "jupyter_server_config.json") def start(self): from jupyter_server.auth.security import set_password + set_password(config_file=self.config_file) self.log.info("Wrote hashed password to %s" % self.config_file) @@ -422,37 +483,39 @@ def shutdown_server(server_info, timeout=5, log=None): failed (on Windows). """ from tornado.httpclient import HTTPClient, HTTPRequest - url = server_info['url'] - pid = server_info['pid'] - if log: log.debug("POST request to %sapi/shutdown", url) + url = server_info["url"] + pid = server_info["pid"] - r = fetch( - url, - method="POST", - headers={'Authorization': 'token ' + server_info['token']} - ) + if log: + log.debug("POST request to %sapi/shutdown", url) + + r = fetch(url, method="POST", headers={"Authorization": "token " + server_info["token"]}) # Poll to see if it shut down. - for _ in range(timeout*10): + for _ in range(timeout * 10): if not check_pid(pid): - if log: log.debug("Server PID %s is gone", pid) + if log: + log.debug("Server PID %s is gone", pid) return True time.sleep(0.1) - if sys.platform.startswith('win'): + if sys.platform.startswith("win"): return False - if log: log.debug("SIGTERM to PID %s", pid) + if log: + log.debug("SIGTERM to PID %s", pid) os.kill(pid, signal.SIGTERM) # Poll to see if it shut down. for _ in range(timeout * 10): if not check_pid(pid): - if log: log.debug("Server PID %s is gone", pid) + if log: + log.debug("Server PID %s is gone", pid) return True time.sleep(0.1) - if log: log.debug("SIGKILL to PID %s", pid) + if log: + log.debug("SIGKILL to PID %s", pid) os.kill(pid, signal.SIGKILL) return True # SIGKILL cannot be caught @@ -462,11 +525,13 @@ class JupyterServerStopApp(JupyterApp): version = __version__ description = "Stop currently running Jupyter server for a given port" - port = Integer(DEFAULT_JUPYTER_SERVER_PORT, config=True, - help="Port of the server to be killed. Default %s" % DEFAULT_JUPYTER_SERVER_PORT) + port = Integer( + DEFAULT_JUPYTER_SERVER_PORT, + config=True, + help="Port of the server to be killed. Default %s" % DEFAULT_JUPYTER_SERVER_PORT, + ) - sock = Unicode(u'', config=True, - help="UNIX socket of the server to be killed.") + sock = Unicode(u"", config=True, help="UNIX socket of the server to be killed.") def parse_command_line(self, argv=None): super(JupyterServerStopApp, self).parse_command_line(argv) @@ -498,47 +563,60 @@ def start(self): self.exit("There are no running servers (per %s)" % self.runtime_dir) for server in servers: if self.sock: - sock = server.get('sock', None) + sock = server.get("sock", None) if sock and sock == self.sock: self._shutdown_or_exit(sock, server) # Attempt to remove the UNIX socket after stopping. 
self._maybe_remove_unix_socket(sock)
                    return
            elif self.port:
-                port = server.get('port', None)
+                port = server.get("port", None)
                 if port == self.port:
                     self._shutdown_or_exit(port, server)
                     return
         current_endpoint = self.sock or self.port
         print(
-            "There is currently no server running on {}".format(current_endpoint),
-            file=sys.stderr
+            "There is currently no server running on {}".format(current_endpoint), file=sys.stderr
         )
         print("Ports/sockets currently in use:", file=sys.stderr)
         for server in servers:
-            print(" - {}".format(server.get('sock') or server['port']), file=sys.stderr)
+            print(" - {}".format(server.get("sock") or server["port"]), file=sys.stderr)
         self.exit(1)


 class JupyterServerListApp(JupyterApp):
     version = __version__
-    description=_i18n("List currently running Jupyter servers.")
+    description = _i18n("List currently running Jupyter servers.")
     flags = dict(
-        jsonlist=({'JupyterServerListApp': {'jsonlist': True}},
-            _i18n("Produce machine-readable JSON list output.")),
-        json=({'JupyterServerListApp': {'json': True}},
-            _i18n("Produce machine-readable JSON object on each line of output.")),
+        jsonlist=(
+            {"JupyterServerListApp": {"jsonlist": True}},
+            _i18n("Produce machine-readable JSON list output."),
+        ),
+        json=(
+            {"JupyterServerListApp": {"json": True}},
+            _i18n("Produce machine-readable JSON object on each line of output."),
+        ),
     )

-    jsonlist = Bool(False, config=True,
-          help=_i18n("If True, the output will be a JSON list of objects, one per "
-                  "active Jupyer server, each with the details from the "
-                  "relevant server info file."))
-    json = Bool(False, config=True,
-          help=_i18n("If True, each line of output will be a JSON object with the "
-                  "details from the server info file. For a JSON list output, "
-                  "see the JupyterServerListApp.jsonlist configuration value"))
+    jsonlist = Bool(
+        False,
+        config=True,
+        help=_i18n(
+            "If True, the output will be a JSON list of objects, one per "
+            "active Jupyter server, each with the details from the "
+            "relevant server info file."
+        ),
+    )
+    json = Bool(
+        False,
+        config=True,
+        help=_i18n(
+            "If True, each line of output will be a JSON object with the "
+            "details from the server info file. 
For a JSON list output, " + "see the JupyterServerListApp.jsonlist configuration value" + ), + ) def start(self): serverinfo_list = list(list_running_servers(self.runtime_dir)) @@ -550,90 +628,103 @@ def start(self): else: print("Currently running servers:") for serverinfo in serverinfo_list: - url = serverinfo['url'] - if serverinfo.get('token'): - url = url + '?token=%s' % serverinfo['token'] - print(url, "::", serverinfo['root_dir']) + url = serverinfo["url"] + if serverinfo.get("token"): + url = url + "?token=%s" % serverinfo["token"] + print(url, "::", serverinfo["root_dir"]) -#----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- # Aliases and Flags -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- flags = dict(base_flags) -flags['allow-root'] = ( - {'ServerApp' : {'allow_root' : True}}, - _i18n("Allow the server to be run from root user.") +flags["allow-root"] = ( + {"ServerApp": {"allow_root": True}}, + _i18n("Allow the server to be run from root user."), ) flags["no-browser"] = ( - { - "ServerApp": {"open_browser": False}, - "ExtensionApp": {"open_browser": False} - }, + {"ServerApp": {"open_browser": False}, "ExtensionApp": {"open_browser": False}}, _i18n("Prevent the opening of the default url in the browser."), ) flags["debug"] = ( - { - 'ServerApp': {'log_level': 'DEBUG'}, - 'ExtensionApp': {'log_level': 'DEBUG'} - }, - _i18n("Set debug level for the extension and underlying server applications.") + {"ServerApp": {"log_level": "DEBUG"}, "ExtensionApp": {"log_level": "DEBUG"}}, + _i18n("Set debug level for the extension and underlying server applications."), ) -flags['autoreload'] = ( - {'ServerApp': {'autoreload': True}}, +flags["autoreload"] = ( + {"ServerApp": {"autoreload": True}}, """Autoreload the webapp Enable reloading of the tornado webapp and all imported Python packages when any changes are made to any Python src files in server or extensions. 
- """ + """, ) # Add notebook manager flags -flags.update(boolean_flag('script', 'FileContentsManager.save_script', - 'DEPRECATED, IGNORED', - 'DEPRECATED, IGNORED')) +flags.update( + boolean_flag( + "script", "FileContentsManager.save_script", "DEPRECATED, IGNORED", "DEPRECATED, IGNORED" + ) +) aliases = dict(base_aliases) -aliases.update({ - 'ip': 'ServerApp.ip', - 'port': 'ServerApp.port', - 'port-retries': 'ServerApp.port_retries', - 'sock': 'ServerApp.sock', - 'sock-mode': 'ServerApp.sock_mode', - 'transport': 'KernelManager.transport', - 'keyfile': 'ServerApp.keyfile', - 'certfile': 'ServerApp.certfile', - 'client-ca': 'ServerApp.client_ca', - 'notebook-dir': 'ServerApp.root_dir', - 'preferred-dir': 'ServerApp.preferred_dir', - 'browser': 'ServerApp.browser', - 'pylab': 'ServerApp.pylab', - 'gateway-url': 'GatewayClient.url', -}) - -#----------------------------------------------------------------------------- +aliases.update( + { + "ip": "ServerApp.ip", + "port": "ServerApp.port", + "port-retries": "ServerApp.port_retries", + "sock": "ServerApp.sock", + "sock-mode": "ServerApp.sock_mode", + "transport": "KernelManager.transport", + "keyfile": "ServerApp.keyfile", + "certfile": "ServerApp.certfile", + "client-ca": "ServerApp.client_ca", + "notebook-dir": "ServerApp.root_dir", + "preferred-dir": "ServerApp.preferred_dir", + "browser": "ServerApp.browser", + "pylab": "ServerApp.pylab", + "gateway-url": "GatewayClient.url", + } +) + +# ----------------------------------------------------------------------------- # ServerApp -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + class ServerApp(JupyterApp): - name = 'jupyter-server' + name = "jupyter-server" version = __version__ - description = _i18n("""The Jupyter Server. + description = _i18n( + """The Jupyter Server. - This launches a Tornado-based Jupyter Server.""") + This launches a Tornado-based Jupyter Server.""" + ) examples = _examples flags = Dict(flags) aliases = Dict(aliases) classes = [ - KernelManager, Session, MappingKernelManager, KernelSpecManager, AsyncMappingKernelManager, - ContentsManager, FileContentsManager, AsyncContentsManager, AsyncFileContentsManager, NotebookNotary, - GatewayMappingKernelManager, GatewayKernelSpecManager, GatewaySessionManager, GatewayClient - ] + KernelManager, + Session, + MappingKernelManager, + KernelSpecManager, + AsyncMappingKernelManager, + ContentsManager, + FileContentsManager, + AsyncContentsManager, + AsyncFileContentsManager, + NotebookNotary, + GatewayMappingKernelManager, + GatewayKernelSpecManager, + GatewaySessionManager, + GatewayClient, + ] if terminado_available: # Only necessary when terminado is available classes.append(TerminalManager) @@ -648,51 +739,55 @@ class ServerApp(JupyterApp): # Subclasses can override this list to # expose a subset of these handlers. 
default_services = ( - 'api', - 'auth', - 'config', - 'contents', - 'files', - 'kernels', - 'kernelspecs', - 'nbconvert', - 'security', - 'sessions', - 'shutdown', - 'view' + "api", + "auth", + "config", + "contents", + "files", + "kernels", + "kernelspecs", + "nbconvert", + "security", + "sessions", + "shutdown", + "view", ) _log_formatter_cls = LogFormatter - @default('log_level') + @default("log_level") def _default_log_level(self): return logging.INFO - @default('log_format') + @default("log_format") def _default_log_format(self): """override default log format to include date & time""" return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" # file to be opened in the Jupyter server - file_to_run = Unicode('', - help="Open the named file when the application is launched." - ).tag(config=True) + file_to_run = Unicode("", help="Open the named file when the application is launched.").tag( + config=True + ) - file_url_prefix = Unicode('notebooks', - help="The URL prefix where files are opened directly." + file_url_prefix = Unicode( + "notebooks", help="The URL prefix where files are opened directly." ).tag(config=True) # Network related information - allow_origin = Unicode('', config=True, + allow_origin = Unicode( + "", + config=True, help="""Set the Access-Control-Allow-Origin header Use '*' to allow any origin to access your server. Takes precedence over allow_origin_pat. - """ + """, ) - allow_origin_pat = Unicode('', config=True, + allow_origin_pat = Unicode( + "", + config=True, help="""Use a regular expression for the Access-Control-Allow-Origin header Requests from an origin matching the expression will get replies with: @@ -702,30 +797,30 @@ def _default_log_format(self): where `origin` is the origin of the request. Ignored if allow_origin is set. - """ + """, ) - allow_credentials = Bool(False, config=True, - help=_i18n("Set the Access-Control-Allow-Credentials: true header") + allow_credentials = Bool( + False, config=True, help=_i18n("Set the Access-Control-Allow-Credentials: true header") ) - allow_root = Bool(False, config=True, - help=_i18n("Whether to allow the user to run the server as root.") + allow_root = Bool( + False, config=True, help=_i18n("Whether to allow the user to run the server as root.") ) - autoreload = Bool(False, config=True, - help=_i18n("Reload the webapp when changes are made to any Python src files.") + autoreload = Bool( + False, + config=True, + help=_i18n("Reload the webapp when changes are made to any Python src files."), ) - default_url = Unicode('/', config=True, - help=_i18n("The default URL to redirect to from `/`") - ) + default_url = Unicode("/", config=True, help=_i18n("The default URL to redirect to from `/`")) - ip = Unicode('localhost', config=True, - help=_i18n("The IP address the Jupyter server will listen on.") + ip = Unicode( + "localhost", config=True, help=_i18n("The IP address the Jupyter server will listen on.") ) - @default('ip') + @default("ip") def _default_ip(self): """Return localhost if available, 127.0.0.1 otherwise. 
@@ -733,23 +828,28 @@ def _default_ip(self): """ s = socket.socket() try: - s.bind(('localhost', 0)) + s.bind(("localhost", 0)) except socket.error as e: - self.log.warning(_i18n("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s"), e) - return '127.0.0.1' + self.log.warning( + _i18n("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s"), e + ) + return "127.0.0.1" else: s.close() - return 'localhost' + return "localhost" - @validate('ip') + @validate("ip") def _validate_ip(self, proposal): - value = proposal['value'] - if value == u'*': - value = u'' + value = proposal["value"] + if value == u"*": + value = u"" return value - custom_display_url = Unicode(u'', config=True, - help=_i18n("""Override URL shown to users. + custom_display_url = Unicode( + u"", + config=True, + help=_i18n( + """Override URL shown to users. Replace actual URL, including protocol, address, port and base URL, with the given value when displaying URL to the users. Do not change @@ -758,97 +858,106 @@ def _validate_ip(self, proposal): This option is intended to be used when the URL to display to the user cannot be determined reliably by the Jupyter server (proxified - or containerized setups for example).""") + or containerized setups for example).""" + ), ) - port_env = 'JUPYTER_PORT' + port_env = "JUPYTER_PORT" port_default_value = DEFAULT_JUPYTER_SERVER_PORT port = Integer( - config=True, - help=_i18n("The port the server will listen on (env: JUPYTER_PORT).") + config=True, help=_i18n("The port the server will listen on (env: JUPYTER_PORT).") ) - @default('port') + @default("port") def port_default(self): return int(os.getenv(self.port_env, self.port_default_value)) - port_retries_env = 'JUPYTER_PORT_RETRIES' + port_retries_env = "JUPYTER_PORT_RETRIES" port_retries_default_value = 50 - port_retries = Integer(port_retries_default_value, config=True, - help=_i18n("The number of additional ports to try if the specified port is not " - "available (env: JUPYTER_PORT_RETRIES).") + port_retries = Integer( + port_retries_default_value, + config=True, + help=_i18n( + "The number of additional ports to try if the specified port is not " + "available (env: JUPYTER_PORT_RETRIES)." + ), ) - @default('port_retries') + @default("port_retries") def port_retries_default(self): return int(os.getenv(self.port_retries_env, self.port_retries_default_value)) - sock = Unicode(u'', config=True, - help="The UNIX socket the Jupyter server will listen on." - ) + sock = Unicode(u"", config=True, help="The UNIX socket the Jupyter server will listen on.") - sock_mode = Unicode('0600', config=True, - help="The permissions mode for UNIX socket creation (default: 0600)." + sock_mode = Unicode( + "0600", config=True, help="The permissions mode for UNIX socket creation (default: 0600)." ) - @validate('sock_mode') + @validate("sock_mode") def _validate_sock_mode(self, proposal): - value = proposal['value'] + value = proposal["value"] try: converted_value = int(value.encode(), 8) - assert all(( - # Ensure the mode is at least user readable/writable. - bool(converted_value & stat.S_IRUSR), - bool(converted_value & stat.S_IWUSR), - # And isn't out of bounds. - converted_value <= 2 ** 12 - )) - except ValueError: - raise TraitError( - 'invalid --sock-mode value: %s, please specify as e.g. "0600"' % value + assert all( + ( + # Ensure the mode is at least user readable/writable. + bool(converted_value & stat.S_IRUSR), + bool(converted_value & stat.S_IWUSR), + # And isn't out of bounds. 
+ converted_value <= 2 ** 12, + ) ) + except ValueError: + raise TraitError('invalid --sock-mode value: %s, please specify as e.g. "0600"' % value) except AssertionError: raise TraitError( - 'invalid --sock-mode value: %s, must have u+rw (0600) at a minimum' % value + "invalid --sock-mode value: %s, must have u+rw (0600) at a minimum" % value ) return value - - certfile = Unicode(u'', config=True, - help=_i18n("""The full path to an SSL/TLS certificate file.""") + certfile = Unicode( + u"", config=True, help=_i18n("""The full path to an SSL/TLS certificate file.""") ) - keyfile = Unicode(u'', config=True, - help=_i18n("""The full path to a private key file for usage with SSL/TLS.""") + keyfile = Unicode( + u"", + config=True, + help=_i18n("""The full path to a private key file for usage with SSL/TLS."""), ) - client_ca = Unicode(u'', config=True, - help=_i18n("""The full path to a certificate authority certificate for SSL/TLS client authentication.""") + client_ca = Unicode( + u"", + config=True, + help=_i18n( + """The full path to a certificate authority certificate for SSL/TLS client authentication.""" + ), ) - cookie_secret_file = Unicode(config=True, - help=_i18n("""The file where the cookie secret is stored.""") + cookie_secret_file = Unicode( + config=True, help=_i18n("""The file where the cookie secret is stored.""") ) - @default('cookie_secret_file') + @default("cookie_secret_file") def _default_cookie_secret_file(self): - return os.path.join(self.runtime_dir, 'jupyter_cookie_secret') + return os.path.join(self.runtime_dir, "jupyter_cookie_secret") - cookie_secret = Bytes(b'', config=True, + cookie_secret = Bytes( + b"", + config=True, help="""The random bytes used to secure cookies. By default this is a new random number every time you start the server. Set it to a value in a config file to enable logins to persist across server sessions. Note: Cookie secrets should be kept private, do not share config files with cookie_secret stored in plaintext (you can read the value from a file). - """ + """, ) - @default('cookie_secret') + @default("cookie_secret") def _default_cookie_secret(self): if os.path.exists(self.cookie_secret_file): - with io.open(self.cookie_secret_file, 'rb') as f: + with io.open(self.cookie_secret_file, "rb") as f: key = f.read() else: key = encodebytes(os.urandom(32)) @@ -864,11 +973,14 @@ def _write_cookie_secret_file(self, secret): with secure_write(self.cookie_secret_file, True) as f: f.write(secret) except OSError as e: - self.log.error(_i18n("Failed to write cookie secret to %s: %s"), - self.cookie_secret_file, e) + self.log.error( + _i18n("Failed to write cookie secret to %s: %s"), self.cookie_secret_file, e + ) - token = Unicode('', - help=_i18n("""Token used for authenticating first-time connections to the server. + token = Unicode( + "", + help=_i18n( + """Token used for authenticating first-time connections to the server. The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set directly with the JUPYTER_TOKEN environment variable. @@ -877,37 +989,40 @@ def _write_cookie_secret_file(self, secret): the default is to generate a new, random token. Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED. 
- """) + """ + ), ).tag(config=True) _token_generated = True - @default('token') + @default("token") def _token_default(self): - if os.getenv('JUPYTER_TOKEN'): + if os.getenv("JUPYTER_TOKEN"): self._token_generated = False - return os.getenv('JUPYTER_TOKEN') - if os.getenv('JUPYTER_TOKEN_FILE'): + return os.getenv("JUPYTER_TOKEN") + if os.getenv("JUPYTER_TOKEN_FILE"): self._token_generated = False - with io.open(os.getenv('JUPYTER_TOKEN_FILE'), "r") as token_file: + with io.open(os.getenv("JUPYTER_TOKEN_FILE"), "r") as token_file: return token_file.read() if self.password: # no token if password is enabled self._token_generated = False - return u'' + return u"" else: self._token_generated = True - return binascii.hexlify(os.urandom(24)).decode('ascii') + return binascii.hexlify(os.urandom(24)).decode("ascii") - min_open_files_limit = Integer(config=True, + min_open_files_limit = Integer( + config=True, help=""" Gets or sets a lower bound on the open file handles process resource limit. This may need to be increased if you run into an OSError: [Errno 24] Too many open files. This is not applicable when running on Windows. - """) + """, + ) - @default('min_open_files_limit') + @default("min_open_files_limit") def _default_min_open_files_limit(self): if resource is None: # Ignoring min_open_files_limit because the limit cannot be adjusted (for example, on Windows) @@ -919,11 +1034,15 @@ def _default_min_open_files_limit(self): if hard >= DEFAULT_SOFT: return DEFAULT_SOFT - self.log.debug("Default value for min_open_files_limit is ignored (hard=%r, soft=%r)", hard, soft) + self.log.debug( + "Default value for min_open_files_limit is ignored (hard=%r, soft=%r)", hard, soft + ) return soft - max_body_size = Integer(512 * 1024 * 1024, config=True, + max_body_size = Integer( + 512 * 1024 * 1024, + config=True, help=""" Sets the maximum allowed size of the client request body, specified in the Content-Length request header field. If the size in a request @@ -931,55 +1050,64 @@ def _default_min_open_files_limit(self): the client. Note: max_body_size is applied even in streaming mode. - """ + """, ) - max_buffer_size = Integer(512 * 1024 * 1024, config=True, + max_buffer_size = Integer( + 512 * 1024 * 1024, + config=True, help=""" Gets or sets the maximum amount of memory, in bytes, that is allocated for use by the buffer manager. - """ + """, ) - @observe('token') + @observe("token") def _token_changed(self, change): self._token_generated = False - password = Unicode(u'', config=True, - help="""Hashed password to use for web authentication. + password = Unicode( + u"", + config=True, + help="""Hashed password to use for web authentication. To generate, type in a python/IPython shell: from jupyter_server.auth import passwd; passwd() The string should be of the form type:salt:hashed-password. - """ + """, ) - password_required = Bool(False, config=True, - help="""Forces users to use a password for the Jupyter server. + password_required = Bool( + False, + config=True, + help="""Forces users to use a password for the Jupyter server. This is useful in a multi user environment, for instance when everybody in the LAN can access each other's machine through ssh. In such a case, serving on localhost is not secure since any user can connect to the Jupyter server via ssh. - """ + """, ) - allow_password_change = Bool(True, config=True, - help="""Allow password to be changed at login for the Jupyter server. 
+ allow_password_change = Bool( + True, + config=True, + help="""Allow password to be changed at login for the Jupyter server. While logging in with a token, the Jupyter server UI will give the opportunity to the user to enter a new password at the same time that will replace the token login mechanism. This can be set to false to prevent changing password from the UI/API. - """ + """, ) - - disable_check_xsrf = Bool(False, config=True, + disable_check_xsrf = Bool( + False, + config=True, help="""Disable cross-site-request-forgery protection Jupyter notebook 4.3.1 introduces protection from cross-site request forgeries, @@ -992,11 +1120,12 @@ def _token_changed(self, change): completely without authentication. These services can disable all authentication and security checks, with the full knowledge of what that implies. - """ + """, ) - allow_remote_access = Bool(config=True, - help="""Allow requests where the Host header doesn't point to a local server + allow_remote_access = Bool( + config=True, + help="""Allow requests where the Host header doesn't point to a local server By default, requests get a 403 forbidden response if the 'Host' header shows that the browser thinks it's on a non-local domain. @@ -1008,9 +1137,10 @@ def _token_changed(self, change): Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, along with hostnames configured in local_hostnames. - """) + """, + ) - @default('allow_remote_access') + @default("allow_remote_access") def _default_allow_remote(self): """Disallow remote access if we're listening only on loopback addresses""" @@ -1027,7 +1157,7 @@ def _default_allow_remote(self): addr = info[4][0] try: - parsed = ipaddress.ip_address(addr.split('%')[0]) + parsed = ipaddress.ip_address(addr.split("%")[0]) except ValueError: self.log.warning("Unrecognised IP address: %r", addr) continue @@ -1035,14 +1165,15 @@ def _default_allow_remote(self): # Macs map localhost to 'fe80::1%lo0', a link local address # scoped to the loopback interface. For now, we'll assume that # any scoped link-local address is effectively local. - if not (parsed.is_loopback - or (('%' in addr) and parsed.is_link_local)): + if not (parsed.is_loopback or (("%" in addr) and parsed.is_link_local)): return True return False else: return not addr.is_loopback - use_redirect_file = Bool(True, config=True, + use_redirect_file = Bool( + True, + config=True, help="""Disable launching browser by redirect file For versions of notebook > 5.7.2, a security feature measure was added that prevented the authentication token used to launch the browser from being visible. @@ -1055,35 +1186,47 @@ def _default_allow_remote(self): Disabling this setting to False will disable this behavior, allowing the browser to launch by using a URL and visible token (as before). - """ + """, ) - local_hostnames = List(Unicode(), ['localhost'], config=True, - help="""Hostnames to allow as local when allow_remote_access is False. + local_hostnames = List( + Unicode(), + ["localhost"], + config=True, + help="""Hostnames to allow as local when allow_remote_access is False. Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted as local as well. - """ + """, ) - open_browser = Bool(False, config=True, - help="""Whether to open in a browser after starting. + open_browser = Bool( + False, + config=True, + help="""Whether to open in a browser after starting. 
The specific browser used is platform dependent and determined by the python standard library `webbrowser` module, unless it is overridden using the --browser (ServerApp.browser) configuration option. - """) + """, + ) - browser = Unicode(u'', config=True, - help="""Specify what command to use to invoke a web + browser = Unicode( + u"", + config=True, + help="""Specify what command to use to invoke a web browser when starting the server. If not specified, the default browser will be determined by the `webbrowser` standard library module, which allows setting of the BROWSER environment variable to override it. - """) + """, + ) - webbrowser_open_new = Integer(2, config=True, - help=_i18n("""Specify where to open the server on startup. This is the + webbrowser_open_new = Integer( + 2, + config=True, + help=_i18n( + """Specify where to open the server on startup. This is the `new` argument passed to the standard library method `webbrowser.open`. The behaviour is not guaranteed, but depends on browser support. Valid values are: @@ -1093,14 +1236,22 @@ def _default_allow_remote(self): - 0 opens in an existing window. See the `webbrowser.open` documentation for details. - """)) + """ + ), + ) - tornado_settings = Dict(config=True, - help=_i18n("Supply overrides for the tornado.web.Application that the " - "Jupyter server uses.")) + tornado_settings = Dict( + config=True, + help=_i18n( + "Supply overrides for the tornado.web.Application that the " "Jupyter server uses." + ), + ) - websocket_compression_options = Any(None, config=True, - help=_i18n(""" + websocket_compression_options = Any( + None, + config=True, + help=_i18n( + """ Set the tornado compression options for websocket connections. This value will be returned from :meth:`WebSocketHandler.get_compression_options`. @@ -1108,54 +1259,72 @@ def _default_allow_remote(self): A dict (even an empty one) will enable compression. See the tornado docs for WebSocketHandler.get_compression_options for details. - """) + """ + ), + ) + terminado_settings = Dict( + config=True, + help=_i18n('Supply overrides for terminado. Currently only supports "shell_command".'), ) - terminado_settings = Dict(config=True, - help=_i18n('Supply overrides for terminado. Currently only supports "shell_command".')) - cookie_options = Dict(config=True, - help=_i18n("Extra keyword arguments to pass to `set_secure_cookie`." - " See tornado's set_secure_cookie docs for details.") + cookie_options = Dict( + config=True, + help=_i18n( + "Extra keyword arguments to pass to `set_secure_cookie`." + " See tornado's set_secure_cookie docs for details." + ), ) - get_secure_cookie_kwargs = Dict(config=True, - help=_i18n("Extra keyword arguments to pass to `get_secure_cookie`." - " See tornado's get_secure_cookie docs for details.") + get_secure_cookie_kwargs = Dict( + config=True, + help=_i18n( + "Extra keyword arguments to pass to `get_secure_cookie`." + " See tornado's get_secure_cookie docs for details." + ), ) ssl_options = Dict( - allow_none=True, - config=True, - help=_i18n("""Supply SSL options for the tornado HTTPServer. - See the tornado docs for details.""")) + allow_none=True, + config=True, + help=_i18n( + """Supply SSL options for the tornado HTTPServer. 
+ See the tornado docs for details.""" + ), + ) - jinja_environment_options = Dict(config=True, - help=_i18n("Supply extra arguments that will be passed to Jinja environment.")) + jinja_environment_options = Dict( + config=True, help=_i18n("Supply extra arguments that will be passed to Jinja environment.") + ) jinja_template_vars = Dict( config=True, help=_i18n("Extra variables to supply to jinja templates when rendering."), ) - base_url = Unicode('/', config=True, - help='''The base URL for the Jupyter server. + base_url = Unicode( + "/", + config=True, + help="""The base URL for the Jupyter server. Leading and trailing slashes can be omitted, and will automatically be added. - ''') + """, + ) - @validate('base_url') + @validate("base_url") def _update_base_url(self, proposal): - value = proposal['value'] - if not value.startswith('/'): - value = '/' + value - if not value.endswith('/'): - value = value + '/' + value = proposal["value"] + if not value.startswith("/"): + value = "/" + value + if not value.endswith("/"): + value = value + "/" return value - extra_static_paths = List(Unicode(), config=True, + extra_static_paths = List( + Unicode(), + config=True, help="""Extra paths to search for serving static files. This allows adding javascript/css to be available from the Jupyter server machine, - or overriding individual files in the IPython""" + or overriding individual files in the IPython""", ) @property @@ -1163,22 +1332,20 @@ def static_file_path(self): """return extra paths + the default location""" return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] - static_custom_path = List(Unicode(), - help=_i18n("""Path to search for custom.js, css""") - ) + static_custom_path = List(Unicode(), help=_i18n("""Path to search for custom.js, css""")) - @default('static_custom_path') + @default("static_custom_path") def _default_static_custom_path(self): - return [ - os.path.join(d, 'custom') for d in ( - self.config_dir, - DEFAULT_STATIC_FILES_PATH) - ] + return [os.path.join(d, "custom") for d in (self.config_dir, DEFAULT_STATIC_FILES_PATH)] - extra_template_paths = List(Unicode(), config=True, - help=_i18n("""Extra paths to search for serving jinja templates. + extra_template_paths = List( + Unicode(), + config=True, + help=_i18n( + """Extra paths to search for serving jinja templates. - Can be used to override templates from jupyter_server.templates.""") + Can be used to override templates from jupyter_server.templates.""" + ), ) @property @@ -1186,20 +1353,28 @@ def template_file_path(self): """return extra paths + the default locations""" return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST - extra_services = List(Unicode(), config=True, - help=_i18n("""handlers that should be loaded at higher priority than the default services""") + extra_services = List( + Unicode(), + config=True, + help=_i18n( + """handlers that should be loaded at higher priority than the default services""" + ), ) - websocket_url = Unicode("", config=True, + websocket_url = Unicode( + "", + config=True, help="""The base URL for websockets, if it differs from the HTTP server (hint: it almost certainly doesn't). 
Should be in the form of an HTTP origin: ws[s]://hostname[:port] - """ + """, ) - quit_button = Bool(True, config=True, - help="""If True, display controls to shut down the Jupyter server, such as menu items or buttons.""" + quit_button = Bool( + True, + config=True, + help="""If True, display controls to shut down the Jupyter server, such as menu items or buttons.""", ) # REMOVE in VERSION 2.0 @@ -1208,23 +1383,23 @@ def template_file_path(self): contents_manager_class = TypeFromClasses( default_value=LargeFileManager, klasses=[ - 'jupyter_server.services.contents.manager.ContentsManager', - 'notebook.services.contents.manager.ContentsManager' + "jupyter_server.services.contents.manager.ContentsManager", + "notebook.services.contents.manager.ContentsManager", ], config=True, - help=_i18n('The content manager class to use.') + help=_i18n("The content manager class to use."), ) # Throws a deprecation warning to notebook based contents managers. - @observe('contents_manager_class') + @observe("contents_manager_class") def _observe_contents_manager_class(self, change): - new = change['new'] + new = change["new"] # If 'new' is a class, get a string representing the import # module path. if inspect.isclass(new): new = new.__module__ - if new.startswith('notebook'): + if new.startswith("notebook"): self.log.warning( "The specified 'contents_manager_class' class inherits a manager from the " "'notebook' package. This is not guaranteed to work in future " @@ -1238,19 +1413,15 @@ def _observe_contents_manager_class(self, change): default_value=AsyncMappingKernelManager, klass=MappingKernelManager, config=True, - help=_i18n('The kernel manager class to use.') + help=_i18n("The kernel manager class to use."), ) session_manager_class = Type( - default_value=SessionManager, - config=True, - help=_i18n('The session manager class to use.') + default_value=SessionManager, config=True, help=_i18n("The session manager class to use.") ) config_manager_class = Type( - default_value=ConfigManager, - config = True, - help=_i18n('The config manager class to use') + default_value=ConfigManager, config=True, help=_i18n("The config manager class to use") ) kernel_spec_manager = Instance(KernelSpecManager, allow_none=True) @@ -1264,88 +1435,96 @@ def _observe_contents_manager_class(self, change): The Api of KernelSpecManager is provisional and might change without warning between this version of Jupyter and the next stable one. - """ + """, ) login_handler_class = Type( default_value=LoginHandler, klass=web.RequestHandler, config=True, - help=_i18n('The login handler class to use.'), + help=_i18n("The login handler class to use."), ) logout_handler_class = Type( default_value=LogoutHandler, klass=web.RequestHandler, config=True, - help=_i18n('The logout handler class to use.'), + help=_i18n("The logout handler class to use."), ) - trust_xheaders = Bool(False, config=True, - help=(_i18n("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers" - "sent by the upstream reverse proxy. Necessary if the proxy handles SSL")) + trust_xheaders = Bool( + False, + config=True, + help=( + _i18n( + "Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers" + "sent by the upstream reverse proxy. 
Necessary if the proxy handles SSL" + ) + ), ) info_file = Unicode() - @default('info_file') + @default("info_file") def _default_info_file(self): info_file = "jpserver-%s.json" % os.getpid() return os.path.join(self.runtime_dir, info_file) browser_open_file = Unicode() - @default('browser_open_file') + @default("browser_open_file") def _default_browser_open_file(self): basename = "jpserver-%s-open.html" % os.getpid() return os.path.join(self.runtime_dir, basename) browser_open_file_to_run = Unicode() - @default('browser_open_file_to_run') + @default("browser_open_file_to_run") def _default_browser_open_file_to_run(self): basename = "jpserver-file-to-run-%s-open.html" % os.getpid() return os.path.join(self.runtime_dir, basename) - pylab = Unicode('disabled', config=True, - help=_i18n(""" + pylab = Unicode( + "disabled", + config=True, + help=_i18n( + """ DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. - """) + """ + ), ) - @observe('pylab') + @observe("pylab") def _update_pylab(self, change): """when --pylab is specified, display a warning and exit""" - if change['new'] != 'warn': - backend = ' %s' % change['new'] + if change["new"] != "warn": + backend = " %s" % change["new"] else: - backend = '' - self.log.error(_i18n("Support for specifying --pylab on the command line has been removed.")) + backend = "" self.log.error( - _i18n("Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.").format(backend) + _i18n("Support for specifying --pylab on the command line has been removed.") + ) + self.log.error( + _i18n("Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.").format( + backend + ) ) self.exit(1) - notebook_dir = Unicode( - config=True, - help=_i18n("DEPRECATED, use root_dir.") - ) + notebook_dir = Unicode(config=True, help=_i18n("DEPRECATED, use root_dir.")) - @observe('notebook_dir') + @observe("notebook_dir") def _update_notebook_dir(self, change): if self._root_dir_set: # only use deprecated config if new config is not set return self.log.warning(_i18n("notebook_dir is deprecated, use root_dir")) - self.root_dir = change['new'] + self.root_dir = change["new"] - root_dir = Unicode( - config=True, - help=_i18n("The directory to use for notebooks and kernels.") - ) + root_dir = Unicode(config=True, help=_i18n("The directory to use for notebooks and kernels.")) _root_dir_set = False - @default('root_dir') + @default("root_dir") def _default_root_dir(self): if self.file_to_run: self._root_dir_set = True @@ -1365,50 +1544,61 @@ def _normalize_dir(self, value): value = os.path.abspath(value) return value - @validate('root_dir') + @validate("root_dir") def _root_dir_validate(self, proposal): - value = self._normalize_dir(proposal['value']) + value = self._normalize_dir(proposal["value"]) if not os.path.isdir(value): raise TraitError(trans.gettext("No such directory: '%r'") % value) return value - preferred_dir = Unicode(config=True, - help=trans.gettext("Preferred starting directory to use for notebooks and kernels.") + preferred_dir = Unicode( + config=True, + help=trans.gettext("Preferred starting directory to use for notebooks and kernels."), ) - @default('preferred_dir') + @default("preferred_dir") def _default_prefered_dir(self): return self.root_dir - @validate('preferred_dir') + @validate("preferred_dir") def _preferred_dir_validate(self, proposal): - value = self._normalize_dir(proposal['value']) + value = self._normalize_dir(proposal["value"]) if not os.path.isdir(value): raise TraitError(trans.gettext("No such preferred dir: 
'%r'") % value) # preferred_dir must be equal or a subdir of root_dir if not value.startswith(self.root_dir): - raise TraitError(trans.gettext("preferred_dir must be equal or a subdir of root_dir: '%r'") % value) + raise TraitError( + trans.gettext("preferred_dir must be equal or a subdir of root_dir: '%r'") % value + ) return value - @observe('root_dir') + @observe("root_dir") def _root_dir_changed(self, change): self._root_dir_set = True - if not self.preferred_dir.startswith(change['new']): - self.log.warning(trans.gettext("Value of preferred_dir updated to use value of root_dir")) - self.preferred_dir = change['new'] + if not self.preferred_dir.startswith(change["new"]): + self.log.warning( + trans.gettext("Value of preferred_dir updated to use value of root_dir") + ) + self.preferred_dir = change["new"] - @observe('server_extensions') + @observe("server_extensions") def _update_server_extensions(self, change): self.log.warning(_i18n("server_extensions is deprecated, use jpserver_extensions")) - self.server_extensions = change['new'] + self.server_extensions = change["new"] - jpserver_extensions = Dict({}, config=True, - help=(_i18n("Dict of Python modules to load as Jupyter server extensions." - "Entry values can be used to enable and disable the loading of" - "the extensions. The extensions will be loaded in alphabetical " - "order.")) + jpserver_extensions = Dict( + {}, + config=True, + help=( + _i18n( + "Dict of Python modules to load as Jupyter server extensions." + "Entry values can be used to enable and disable the loading of" + "the extensions. The extensions will be loaded in alphabetical " + "order." + ) + ), ) reraise_server_extension_failures = Bool( @@ -1417,36 +1607,63 @@ def _update_server_extensions(self, change): help=_i18n("Reraise exceptions encountered loading server extensions?"), ) - iopub_msg_rate_limit = Float(1000, config=True, help=_i18n("""(msgs/sec) + iopub_msg_rate_limit = Float( + 1000, + config=True, + help=_i18n( + """(msgs/sec) Maximum rate at which messages can be sent on iopub before they are - limited.""")) + limited.""" + ), + ) - iopub_data_rate_limit = Float(1000000, config=True, help=_i18n("""(bytes/sec) + iopub_data_rate_limit = Float( + 1000000, + config=True, + help=_i18n( + """(bytes/sec) Maximum rate at which stream output can be sent on iopub before they are - limited.""")) + limited.""" + ), + ) - rate_limit_window = Float(3, config=True, help=_i18n("""(sec) Time window used to - check the message and data rate limits.""")) + rate_limit_window = Float( + 3, + config=True, + help=_i18n( + """(sec) Time window used to + check the message and data rate limits.""" + ), + ) - shutdown_no_activity_timeout = Integer(0, config=True, - help=("Shut down the server after N seconds with no kernels or " - "terminals running and no activity. " - "This can be used together with culling idle kernels " - "(MappingKernelManager.cull_idle_timeout) to " - "shutdown the Jupyter server when it's not in use. This is not " - "precisely timed: it may shut down up to a minute later. " - "0 (the default) disables this automatic shutdown.") + shutdown_no_activity_timeout = Integer( + 0, + config=True, + help=( + "Shut down the server after N seconds with no kernels or " + "terminals running and no activity. " + "This can be used together with culling idle kernels " + "(MappingKernelManager.cull_idle_timeout) to " + "shutdown the Jupyter server when it's not in use. This is not " + "precisely timed: it may shut down up to a minute later. 
" + "0 (the default) disables this automatic shutdown." + ), ) - terminals_enabled = Bool(True, config=True, - help=_i18n("""Set to False to disable terminals. + terminals_enabled = Bool( + True, + config=True, + help=_i18n( + """Set to False to disable terminals. This does *not* make the server more secure by itself. Anything the user can in a terminal, they can also do in a notebook. Terminals may also be automatically disabled if the terminado package is not available. - """)) + """ + ), + ) # Since use of terminals is also a function of whether the terminado package is # available, this variable holds the "final indication" of whether terminal functionality @@ -1462,13 +1679,13 @@ def _update_server_extensions(self, change): help="""" Require authentication to access prometheus metrics. """, - config=True + config=True, ) _starter_app = Instance( default_value=None, allow_none=True, - klass='jupyter_server.extension.application.ExtensionApp' + klass="jupyter_server.extension.application.ExtensionApp", ) @property @@ -1504,9 +1721,13 @@ def init_configurables(self): self.gateway_config = GatewayClient.instance(parent=self) if self.gateway_config.gateway_enabled: - self.kernel_manager_class = 'jupyter_server.gateway.managers.GatewayMappingKernelManager' - self.session_manager_class = 'jupyter_server.gateway.managers.GatewaySessionManager' - self.kernel_spec_manager_class = 'jupyter_server.gateway.managers.GatewayKernelSpecManager' + self.kernel_manager_class = ( + "jupyter_server.gateway.managers.GatewayMappingKernelManager" + ) + self.session_manager_class = "jupyter_server.gateway.managers.GatewaySessionManager" + self.kernel_spec_manager_class = ( + "jupyter_server.gateway.managers.GatewayKernelSpecManager" + ) self.kernel_spec_manager = self.kernel_spec_manager_class( parent=self, @@ -1542,29 +1763,31 @@ def init_logging(self): # consistent log output name (ServerApp instead of tornado.access, etc.) 
log.name = self.log.name # hook up tornado 3's loggers to our app handlers - logger = logging.getLogger('tornado') + logger = logging.getLogger("tornado") logger.propagate = True logger.parent = self.log logger.setLevel(self.log.level) def init_webapp(self): """initialize tornado webapp""" - self.tornado_settings['allow_origin'] = self.allow_origin - self.tornado_settings['websocket_compression_options'] = self.websocket_compression_options + self.tornado_settings["allow_origin"] = self.allow_origin + self.tornado_settings["websocket_compression_options"] = self.websocket_compression_options if self.allow_origin_pat: - self.tornado_settings['allow_origin_pat'] = re.compile(self.allow_origin_pat) - self.tornado_settings['allow_credentials'] = self.allow_credentials - self.tornado_settings['autoreload'] = self.autoreload - self.tornado_settings['cookie_options'] = self.cookie_options - self.tornado_settings['get_secure_cookie_kwargs'] = self.get_secure_cookie_kwargs - self.tornado_settings['token'] = self.token + self.tornado_settings["allow_origin_pat"] = re.compile(self.allow_origin_pat) + self.tornado_settings["allow_credentials"] = self.allow_credentials + self.tornado_settings["autoreload"] = self.autoreload + self.tornado_settings["cookie_options"] = self.cookie_options + self.tornado_settings["get_secure_cookie_kwargs"] = self.get_secure_cookie_kwargs + self.tornado_settings["token"] = self.token # ensure default_url starts with base_url if not self.default_url.startswith(self.base_url): self.default_url = url_path_join(self.base_url, self.default_url) if self.password_required and (not self.password): - self.log.critical(_i18n("Jupyter servers are configured to only be run with a password.")) + self.log.critical( + _i18n("Jupyter servers are configured to only be run with a password.") + ) self.log.critical(_i18n("Hint: run the following command to set a password")) self.log.critical(_i18n("\t$ python -m jupyter_server.auth password")) sys.exit(1) @@ -1573,7 +1796,7 @@ def init_webapp(self): if self.sock: if self.port != DEFAULT_JUPYTER_SERVER_PORT: self.log.critical( - ('Options --port and --sock are mutually exclusive. Aborting.'), + ("Options --port and --sock are mutually exclusive. Aborting."), ) sys.exit(1) else: @@ -1583,35 +1806,45 @@ def init_webapp(self): if self.open_browser: # If we're bound to a UNIX socket, we can't reliably connect from a browser. self.log.info( - ('Ignoring --ServerApp.open_browser due to --sock being used.'), + ("Ignoring --ServerApp.open_browser due to --sock being used."), ) if self.file_to_run: self.log.critical( - ('Options --ServerApp.file_to_run and --sock are mutually exclusive.'), + ("Options --ServerApp.file_to_run and --sock are mutually exclusive."), ) sys.exit(1) - if sys.platform.startswith('win'): + if sys.platform.startswith("win"): self.log.critical( - ('Option --sock is not supported on Windows, but got value of %s. Aborting.' % self.sock), + ( + "Option --sock is not supported on Windows, but got value of %s. Aborting." 
+ % self.sock + ), ) sys.exit(1) - self.web_app = ServerWebApplication( - self, self.default_services, self.kernel_manager, self.contents_manager, - self.session_manager, self.kernel_spec_manager, - self.config_manager, self.extra_services, - self.log, self.base_url, self.default_url, self.tornado_settings, + self, + self.default_services, + self.kernel_manager, + self.contents_manager, + self.session_manager, + self.kernel_spec_manager, + self.config_manager, + self.extra_services, + self.log, + self.base_url, + self.default_url, + self.tornado_settings, self.jinja_environment_options, ) if self.certfile: - self.ssl_options['certfile'] = self.certfile + self.ssl_options["certfile"] = self.certfile if self.keyfile: - self.ssl_options['keyfile'] = self.keyfile + self.ssl_options["keyfile"] = self.keyfile if self.client_ca: - self.ssl_options['ca_certs'] = self.client_ca + self.ssl_options["ca_certs"] = self.client_ca if not self.ssl_options: # could be an empty dict or None # None indicates no SSL config @@ -1619,14 +1852,14 @@ def init_webapp(self): else: # SSL may be missing, so only import it if it's to be used import ssl + # PROTOCOL_TLS selects the highest ssl/tls protocol version that both the client and # server support. When PROTOCOL_TLS is not available use PROTOCOL_SSLv23. self.ssl_options.setdefault( - 'ssl_version', - getattr(ssl, 'PROTOCOL_TLS', ssl.PROTOCOL_SSLv23) + "ssl_version", getattr(ssl, "PROTOCOL_TLS", ssl.PROTOCOL_SSLv23) ) - if self.ssl_options.get('ca_certs', False): - self.ssl_options.setdefault('cert_reqs', ssl.CERT_REQUIRED) + if self.ssl_options.get("ca_certs", False): + self.ssl_options.setdefault("cert_reqs", ssl.CERT_REQUIRED) ssl_options = self.ssl_options self.login_handler_class.validate_security(self, ssl_options=self.ssl_options) @@ -1634,7 +1867,9 @@ def init_webapp(self): def init_resources(self): """initialize system resources""" if resource is None: - self.log.debug('Ignoring min_open_files_limit because the limit cannot be adjusted (for example, on Windows)') + self.log.debug( + "Ignoring min_open_files_limit because the limit cannot be adjusted (for example, on Windows)" + ) return old_soft, old_hard = resource.getrlimit(resource.RLIMIT_NOFILE) @@ -1644,7 +1879,9 @@ def init_resources(self): if hard < soft: hard = soft self.log.debug( - 'Raising open file limit: soft {}->{}; hard {}->{}'.format(old_soft, soft, old_hard, hard) + "Raising open file limit: soft {}->{}; hard {}->{}".format( + old_soft, soft, old_hard, hard + ) ) resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) @@ -1654,34 +1891,29 @@ def _get_urlparts(self, path=None, include_token=False): The returned tuple can be manipulated using the `_replace` method. """ if self.sock: - scheme = 'http+unix' + scheme = "http+unix" netloc = urlencode_unix_socket_path(self.sock) else: # Handle nonexplicit hostname. - if self.ip in ('', '0.0.0.0'): - ip = "%s" % socket.gethostname() + if self.ip in ("", "0.0.0.0"): + ip = "%s" % socket.gethostname() else: ip = self.ip netloc = "{ip}:{port}".format(ip=ip, port=self.port) if self.certfile: - scheme = 'https' + scheme = "https" else: - scheme = 'http' + scheme = "http" if not path: path = self.default_url query = None if include_token: - if self.token: # Don't log full token if it came from config - token = self.token if self._token_generated else '...' - query = urllib.parse.urlencode({'token': token}) + if self.token: # Don't log full token if it came from config + token = self.token if self._token_generated else "..." 
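+                # Masking a config-supplied token as "..." keeps the real
+                # credential out of logged URLs; only autogenerated tokens
+                # are displayed in full.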
+ query = urllib.parse.urlencode({"token": token}) # Build the URL Parts to dump. urlparts = urllib.parse.ParseResult( - scheme=scheme, - netloc=netloc, - path=path, - params=None, - query=query, - fragment=None + scheme=scheme, netloc=netloc, path=path, params=None, query=query, fragment=None ) return urlparts @@ -1711,11 +1943,7 @@ def display_url(self): """Human readable string with URLs for interacting with the running Jupyter Server """ - url = ( - self.public_url - + '\n or ' + - self.local_url - ) + url = self.public_url + "\n or " + self.local_url return url @property @@ -1729,19 +1957,20 @@ def init_terminals(self): try: from jupyter_server.terminal import initialize + initialize(self.web_app, self.root_dir, self.connection_url, self.terminado_settings) self.terminals_available = True except ImportError as e: self.log.warning(_i18n("Terminals not available (error was %s)"), e) def init_signal(self): - if not sys.platform.startswith('win') and sys.stdin and sys.stdin.isatty(): + if not sys.platform.startswith("win") and sys.stdin and sys.stdin.isatty(): signal.signal(signal.SIGINT, self._handle_sigint) signal.signal(signal.SIGTERM, self._signal_stop) - if hasattr(signal, 'SIGUSR1'): + if hasattr(signal, "SIGUSR1"): # Windows doesn't support SIGUSR1 signal.signal(signal.SIGUSR1, self._signal_info) - if hasattr(signal, 'SIGINFO'): + if hasattr(signal, "SIGINFO"): # only on BSD-based systems signal.signal(signal.SIGINFO, self._signal_info) @@ -1768,7 +1997,7 @@ def _confirm_exit(self): This doesn't work on Windows. """ info = self.log.info - info(_i18n('interrupted')) + info(_i18n("interrupted")) # Check if answer_yes is set if self.answer_yes: self.log.critical(_i18n("Shutting down...")) @@ -1777,11 +2006,11 @@ def _confirm_exit(self): self.stop(from_signal=True) return print(self.running_server_info()) - yes = _i18n('y') - no = _i18n('n') + yes = _i18n("y") + no = _i18n("n") sys.stdout.write(_i18n("Shutdown this Jupyter server (%s/[%s])? ") % (yes, no)) sys.stdout.flush() - r,w,x = select.select([sys.stdin], [], [], 5) + r, w, x = select.select([sys.stdin], [], [], 5) if r: line = sys.stdin.readline() if line.lower().startswith(yes) and no not in line.lower(): @@ -1791,7 +2020,7 @@ def _confirm_exit(self): self.stop(from_signal=True) return else: - print(_i18n("No answer for 5s:"), end=' ') + print(_i18n("No answer for 5s:"), end=" ") print(_i18n("resuming operation...")) # no answer, or answer is no: # set it back to original SIGINT handler @@ -1868,39 +2097,40 @@ def init_mime_overrides(self): # reject these files. 
We know the mimetype always needs to be text/css for css
         # and application/javascript for JS, so we override it here
         # and explicitly tell the mimetypes to not trust the Windows registry
-        if os.name == 'nt':
+        if os.name == "nt":
             # do not trust windows registry, which regularly has bad info
             mimetypes.init(files=[])
             # ensure css, js are correct, which are required for pages to function
-            mimetypes.add_type('text/css', '.css')
-            mimetypes.add_type('application/javascript', '.js')
+            mimetypes.add_type("text/css", ".css")
+            mimetypes.add_type("application/javascript", ".js")
             # for python <3.8
-            mimetypes.add_type('application/wasm', '.wasm')
+            mimetypes.add_type("application/wasm", ".wasm")

     def shutdown_no_activity(self):
         """Shutdown server on timeout when there are no kernels or terminals."""
         km = self.kernel_manager
         if len(km) != 0:
-            return # Kernels still running
+            return  # Kernels still running

         if self.terminals_available:
-            term_mgr = self.web_app.settings['terminal_manager']
+            term_mgr = self.web_app.settings["terminal_manager"]
             if term_mgr.terminals:
-                return # Terminals still running
+                return  # Terminals still running

-        seconds_since_active = \
-            (utcnow() - self.web_app.last_activity()).total_seconds()
-        self.log.debug("No activity for %d seconds.",
-                       seconds_since_active)
+        seconds_since_active = (utcnow() - self.web_app.last_activity()).total_seconds()
+        self.log.debug("No activity for %d seconds.", seconds_since_active)
         if seconds_since_active > self.shutdown_no_activity_timeout:
-            self.log.info("No kernels or terminals for %d seconds; shutting down.",
-                          seconds_since_active)
+            self.log.info(
+                "No kernels or terminals for %d seconds; shutting down.", seconds_since_active
+            )
             self.stop()

     def init_shutdown_no_activity(self):
         if self.shutdown_no_activity_timeout > 0:
-            self.log.info("Will shut down after %d seconds with no kernels or terminals.",
-                          self.shutdown_no_activity_timeout)
+            self.log.info(
+                "Will shut down after %d seconds with no kernels or terminals.",
+                self.shutdown_no_activity_timeout,
+            )
             pc = ioloop.PeriodicCallback(self.shutdown_no_activity, 60000)
             pc.start()

@@ -1911,19 +2141,21 @@ def http_server(self):
             return self._http_server
         except AttributeError as e:
             raise AttributeError(
-                'An HTTPServer instance has not been created for the '
-                'Server Web Application. To create an HTTPServer for this '
-                'application, call `.init_httpserver()`.'
-                ) from e
+                "An HTTPServer instance has not been created for the "
+                "Server Web Application. To create an HTTPServer for this "
+                "application, call `.init_httpserver()`."
+            ) from e

     def init_httpserver(self):
         """Creates an instance of a Tornado HTTPServer for the Server Web
         Application and sets the http_server attribute.
         """
         # Check that a web_app has been initialized before starting a server.
-        if not hasattr(self, 'web_app'):
-            raise AttributeError('A tornado web application has not be initialized. '
-                                 'Try calling `.init_webapp()` first.')
+        if not hasattr(self, "web_app"):
+            raise AttributeError(
+                "A tornado web application has not been initialized. "
+                "Try calling `.init_webapp()` first."
+            )

         # Create an instance of the server.
self._http_server = httpserver.HTTPServer( @@ -1931,13 +2163,17 @@ def init_httpserver(self): ssl_options=self.ssl_options, xheaders=self.trust_xheaders, max_body_size=self.max_body_size, - max_buffer_size=self.max_buffer_size + max_buffer_size=self.max_buffer_size, ) success = self._bind_http_server() if not success: - self.log.critical(_i18n('ERROR: the Jupyter server could not be started because ' - 'no available port could be found.')) + self.log.critical( + _i18n( + "ERROR: the Jupyter server could not be started because " + "no available port could be found." + ) + ) self.exit(1) def _bind_http_server(self): @@ -1945,7 +2181,7 @@ def _bind_http_server(self): def _bind_http_server_unix(self): if unix_socket_in_use(self.sock): - self.log.warning(_i18n('The socket %s is already in use.') % self.sock) + self.log.warning(_i18n("The socket %s is already in use.") % self.sock) return False try: @@ -1953,9 +2189,9 @@ def _bind_http_server_unix(self): self.http_server.add_socket(sock) except socket.error as e: if e.errno == errno.EADDRINUSE: - self.log.warning(_i18n('The socket %s is already in use.') % self.sock) + self.log.warning(_i18n("The socket %s is already in use.") % self.sock) return False - elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)): + elif e.errno in (errno.EACCES, getattr(errno, "WSAEACCES", errno.EACCES)): self.log.warning(_i18n("Permission to listen on sock %s denied") % self.sock) return False else: @@ -1965,17 +2201,19 @@ def _bind_http_server_unix(self): def _bind_http_server_tcp(self): success = None - for port in random_ports(self.port, self.port_retries+1): + for port in random_ports(self.port, self.port_retries + 1): try: self.http_server.listen(port, self.ip) except socket.error as e: if e.errno == errno.EADDRINUSE: if self.port_retries: - self.log.info(_i18n('The port %i is already in use, trying another port.') % port) + self.log.info( + _i18n("The port %i is already in use, trying another port.") % port + ) else: - self.log.info(_i18n('The port %i is already in use.') % port) + self.log.info(_i18n("The port %i is already in use.") % port) continue - elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)): + elif e.errno in (errno.EACCES, getattr(errno, "WSAEACCES", errno.EACCES)): self.log.warning(_i18n("Permission to listen on port %i denied.") % port) continue else: @@ -1986,15 +2224,23 @@ def _bind_http_server_tcp(self): break if not success: if self.port_retries: - self.log.critical(_i18n('ERROR: the Jupyter server could not be started because ' - 'no available port could be found.')) + self.log.critical( + _i18n( + "ERROR: the Jupyter server could not be started because " + "no available port could be found." + ) + ) else: - self.log.critical(_i18n('ERROR: the Jupyter server could not be started because ' - 'port %i is not available.') % port) + self.log.critical( + _i18n( + "ERROR: the Jupyter server could not be started because " + "port %i is not available." 
+ ) + % port + ) self.exit(1) return success - @staticmethod def _init_asyncio_patch(): """set default asyncio policy to be compatible with tornado @@ -2019,15 +2265,14 @@ def _init_asyncio_patch(): pass # not affected else: - if ( - type(asyncio.get_event_loop_policy()) - is WindowsProactorEventLoopPolicy - ): + if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy: # prefer Selector to Proactor for tornado + pyzmq asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy()) @catch_config_error - def initialize(self, argv=None, find_extensions=True, new_httpserver=True, starter_extension=None): + def initialize( + self, argv=None, find_extensions=True, new_httpserver=True, starter_extension=None + ): """Initialize the Server application class, configurables, web application, and http server. Parameters @@ -2090,7 +2335,9 @@ async def cleanup_kernels(self): but explicit shutdown allows the KernelManagers to cleanup the connection files. """ n_kernels = len(self.kernel_manager.list_kernel_ids()) - kernel_msg = trans.ngettext('Shutting down %d kernel', 'Shutting down %d kernels', n_kernels) + kernel_msg = trans.ngettext( + "Shutting down %d kernel", "Shutting down %d kernels", n_kernels + ) self.log.info(kernel_msg % n_kernels) await run_sync_in_loop(self.kernel_manager.shutdown_all()) @@ -2103,9 +2350,11 @@ async def cleanup_terminals(self): if not self.terminals_available: return - terminal_manager = self.web_app.settings['terminal_manager'] + terminal_manager = self.web_app.settings["terminal_manager"] n_terminals = len(terminal_manager.list()) - terminal_msg = trans.ngettext('Shutting down %d terminal', 'Shutting down %d terminals', n_terminals) + terminal_msg = trans.ngettext( + "Shutting down %d terminal", "Shutting down %d terminals", n_terminals + ) self.log.info(terminal_msg % n_terminals) await run_sync_in_loop(terminal_manager.terminate_all()) @@ -2113,14 +2362,10 @@ async def cleanup_extensions(self): """Call shutdown hooks in all extensions.""" n_extensions = len(self.extension_manager.extension_apps) extension_msg = trans.ngettext( - 'Shutting down %d extension', - 'Shutting down %d extensions', - n_extensions + "Shutting down %d extension", "Shutting down %d extensions", n_extensions ) self.log.info(extension_msg % n_extensions) - await run_sync_in_loop( - self.extension_manager.stop_all_extensions(self) - ) + await run_sync_in_loop(self.extension_manager.stop_all_extensions(self)) def running_server_info(self, kernel_count=True): "Return the current working directory and the server url information" @@ -2131,26 +2376,33 @@ def running_server_info(self, kernel_count=True): info += kernel_msg % n_kernels info += "\n" # Format the info so that the URL fits on a single line in 80 char display - info += _i18n("Jupyter Server {version} is running at:\n{url}". 
-                      format(version=ServerApp.version, url=self.display_url))
+        info += _i18n(
+            "Jupyter Server {version} is running at:\n{url}".format(
+                version=ServerApp.version, url=self.display_url
+            )
+        )
         if self.gateway_config.gateway_enabled:
-            info += _i18n("\nKernels will be managed by the Gateway server running at:\n%s") % self.gateway_config.url
+            info += (
+                _i18n("\nKernels will be managed by the Gateway server running at:\n%s")
+                % self.gateway_config.url
+            )
         return info

     def server_info(self):
         """Return a JSONable dict of information about this server."""
-        return {'url': self.connection_url,
-                'hostname': self.ip if self.ip else 'localhost',
-                'port': self.port,
-                'sock': self.sock,
-                'secure': bool(self.certfile),
-                'base_url': self.base_url,
-                'token': self.token,
-                'root_dir': os.path.abspath(self.root_dir),
-                'password': bool(self.password),
-                'pid': os.getpid(),
-                'version': ServerApp.version,
-                }
+        return {
+            "url": self.connection_url,
+            "hostname": self.ip if self.ip else "localhost",
+            "port": self.port,
+            "sock": self.sock,
+            "secure": bool(self.certfile),
+            "base_url": self.base_url,
+            "token": self.token,
+            "root_dir": os.path.abspath(self.root_dir),
+            "password": bool(self.password),
+            "pid": os.getpid(),
+            "version": ServerApp.version,
+        }

     def write_server_info_file(self):
         """Write the result of server_info() to the JSON file info_file."""
@@ -2158,8 +2410,7 @@ def write_server_info_file(self):
             with secure_write(self.info_file) as f:
                 json.dump(self.server_info(), f, indent=2, sort_keys=True)
         except OSError as e:
-            self.log.error(_i18n("Failed to write server-info to %s: %s"),
-                           self.info_file, e)
+            self.log.error(_i18n("Failed to write server-info to %s: %s"), self.info_file, e)

     def remove_server_info_file(self):
         """Remove the jpserver-<pid>.json file created for this server.
@@ -2170,7 +2421,7 @@ def remove_server_info_file(self): os.unlink(self.info_file) except OSError as e: if e.errno != errno.ENOENT: - raise; + raise def _resolve_file_to_run_and_root_dir(self): """Returns a relative path from file_to_run @@ -2205,11 +2456,11 @@ def _resolve_file_to_run_and_root_dir(self): def _write_browser_open_file(self, url, fh): if self.token: - url = url_concat(url, {'token': self.token}) + url = url_concat(url, {"token": self.token}) url = url_path_join(self.connection_url, url) - jinja2_env = self.web_app.settings['jinja2_env'] - template = jinja2_env.get_template('browser-open.html') + jinja2_env = self.web_app.settings["jinja2_env"] + template = jinja2_env.get_template("browser-open.html") fh.write(template.render(open_url=url, base_url=self.base_url)) def write_browser_open_files(self): @@ -2230,7 +2481,7 @@ def write_browser_open_files(self): url_path_join(self.file_url_prefix, *file_to_run_relpath.split(os.sep)) ) - with open(self.browser_open_file_to_run, 'w', encoding='utf-8') as f: + with open(self.browser_open_file_to_run, "w", encoding="utf-8") as f: self._write_browser_open_file(file_open_url, f) def write_browser_open_file(self): @@ -2239,9 +2490,9 @@ def write_browser_open_file(self): This can be used to open the notebook in a browser """ # default_url contains base_url, but so does connection_url - open_url = self.default_url[len(self.base_url):] + open_url = self.default_url[len(self.base_url) :] - with open(self.browser_open_file, 'w', encoding='utf-8') as f: + with open(self.browser_open_file, "w", encoding="utf-8") as f: self._write_browser_open_file(open_url, f) def remove_browser_open_files(self): @@ -2270,10 +2521,10 @@ def remove_browser_open_file(self): def _prepare_browser_open(self): if not self.use_redirect_file: - uri = self.default_url[len(self.base_url):] + uri = self.default_url[len(self.base_url) :] if self.token: - uri = url_concat(uri, {'token': self.token}) + uri = url_concat(uri, {"token": self.token}) if self.file_to_run: # Create a separate, temporary open-browser-file @@ -2284,7 +2535,7 @@ def _prepare_browser_open(self): open_file = self.browser_open_file if self.use_redirect_file: - assembled_url = urljoin('file:', pathname2url(open_file)) + assembled_url = urljoin("file:", pathname2url(open_file)) else: assembled_url = url_path_join(self.connection_url, uri) @@ -2294,7 +2545,7 @@ def launch_browser(self): try: browser = webbrowser.get(self.browser or None) except webbrowser.Error as e: - self.log.warning(_i18n('No web browser found: %s.') % e) + self.log.warning(_i18n("No web browser found: %s.") % e) browser = None if not browser: @@ -2313,20 +2564,30 @@ def start_app(self): try: uid = os.geteuid() except AttributeError: - uid = -1 # anything nonzero here, since we can't check UID assume non-root + uid = -1 # anything nonzero here, since we can't check UID assume non-root if uid == 0: - self.log.critical(_i18n("Running as root is not recommended. Use --allow-root to bypass.")) + self.log.critical( + _i18n("Running as root is not recommended. Use --allow-root to bypass.") + ) self.exit(1) info = self.log.info for line in self.running_server_info(kernel_count=False).split("\n"): info(line) - info(_i18n("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).")) - if 'dev' in __version__: - info(_i18n("Welcome to Project Jupyter! Explore the various tools available" - " and their corresponding documentation. 
If you are interested" - " in contributing to the platform, please visit the community" - " resources section at https://jupyter.org/community.html.")) + info( + _i18n( + "Use Control-C to stop this server and shut down all kernels (twice to skip confirmation)." + ) + ) + if "dev" in __version__: + info( + _i18n( + "Welcome to Project Jupyter! Explore the various tools available" + " and their corresponding documentation. If you are interested" + " in contributing to the platform, please visit the community" + " resources section at https://jupyter.org/community.html." + ) + ) self.write_server_info_file() self.write_browser_open_files() @@ -2339,24 +2600,33 @@ def start_app(self): # log full URL with generated token, so there's a copy/pasteable link # with auth info. if self.sock: - self.log.critical('\n'.join([ - '\n', - 'Jupyter Server is listening on %s' % self.display_url, - '', - ( - 'UNIX sockets are not browser-connectable, but you can tunnel to ' - 'the instance via e.g.`ssh -L 8888:%s -N user@this_host` and then ' - 'open e.g. %s in a browser.' - ) % (self.sock, self.connection_url) - ])) + self.log.critical( + "\n".join( + [ + "\n", + "Jupyter Server is listening on %s" % self.display_url, + "", + ( + "UNIX sockets are not browser-connectable, but you can tunnel to " + "the instance via e.g.`ssh -L 8888:%s -N user@this_host` and then " + "open e.g. %s in a browser." + ) + % (self.sock, self.connection_url), + ] + ) + ) else: - self.log.critical('\n'.join([ - '\n', - 'To access the server, open this file in a browser:', - ' %s' % urljoin('file:', pathname2url(self.browser_open_file)), - 'Or copy and paste one of these URLs:', - ' %s' % self.display_url, - ])) + self.log.critical( + "\n".join( + [ + "\n", + "To access the server, open this file in a browser:", + " %s" % urljoin("file:", pathname2url(self.browser_open_file)), + "Or copy and paste one of these URLs:", + " %s" % self.display_url, + ] + ) + ) async def _cleanup(self): """General cleanup of files, extensions and kernels created @@ -2370,10 +2640,10 @@ async def _cleanup(self): def start_ioloop(self): """Start the IO Loop.""" - if sys.platform.startswith('win'): + if sys.platform.startswith("win"): # add no-op to wake every 5s # to handle signals that may be ignored by the inner loop - pc = ioloop.PeriodicCallback(lambda : None, 5000) + pc = ioloop.PeriodicCallback(lambda: None, 5000) pc.start() try: self.io_loop.start() @@ -2385,7 +2655,7 @@ def init_ioloop(self): self.io_loop = ioloop.IOLoop.current() def start(self): - """ Start the Jupyter server app, after initialization + """Start the Jupyter server app, after initialization This method takes no arguments so all configuration and initialization must be done prior to calling this method.""" @@ -2399,10 +2669,10 @@ async def _stop(self): def stop(self, from_signal=False): """Cleanup resources and stop the server.""" - if hasattr(self, '_http_server'): + if hasattr(self, "_http_server"): # Stop a server if its set. 
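             # (tornado's HTTPServer.stop() only stops accepting new connections;
             # connections already open are allowed to finish)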
self.http_server.stop() - if getattr(self, 'io_loop', None): + if getattr(self, "io_loop", None): # use IOLoop.add_callback because signal.signal must be called # from main thread if from_signal: @@ -2426,13 +2696,13 @@ def list_running_servers(runtime_dir=None): return for file_name in os.listdir(runtime_dir): - if re.match('jpserver-(.+).json', file_name): - with io.open(os.path.join(runtime_dir, file_name), encoding='utf-8') as f: + if re.match("jpserver-(.+).json", file_name): + with io.open(os.path.join(runtime_dir, file_name), encoding="utf-8") as f: info = json.load(f) # Simple check whether that process is really still running # Also remove leftover files from IPython 2.x without a pid field - if ('pid' in info) and check_pid(info['pid']): + if ("pid" in info) and check_pid(info["pid"]): yield info else: # If the process has died, try to delete its info file @@ -2440,8 +2710,10 @@ def list_running_servers(runtime_dir=None): os.unlink(os.path.join(runtime_dir, file_name)) except OSError: pass # TODO: This should warn or log or something -#----------------------------------------------------------------------------- + + +# ----------------------------------------------------------------------------- # Main entry point -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- main = launch_new_instance = ServerApp.launch_instance diff --git a/jupyter_server/services/api/api.yaml b/jupyter_server/services/api/api.yaml index d7bcc2df9a..844831e045 100644 --- a/jupyter_server/services/api/api.yaml +++ b/jupyter_server/services/api/api.yaml @@ -1,4 +1,4 @@ -swagger: '2.0' +swagger: "2.0" info: title: Jupyter Server API description: Server API @@ -53,11 +53,9 @@ parameters: type: string paths: - - /api/contents/{path}: parameters: - - $ref: '#/parameters/path' + - $ref: "#/parameters/path" get: summary: Get contents of file or directory description: "A client can optionally specify a type and/or format argument via URL parameter. When given, the Contents service shall return a model in the requested type and/or format. If the request cannot be satisfied, e.g. type=text is requested, but the file is binary, then the request shall fail with 400 and have a JSON response containing a 'reason' field, with the value 'bad format' or 'bad type', depending on what was requested." @@ -104,7 +102,7 @@ paths: type: string format: dateTime schema: - $ref: '#/definitions/Contents' + $ref: "#/definitions/Contents" 500: description: Model key error post: @@ -134,7 +132,7 @@ paths: type: string format: url schema: - $ref: '#/definitions/Contents' + $ref: "#/definitions/Contents" 404: description: No item found 400: @@ -173,7 +171,7 @@ paths: type: string format: url schema: - $ref: '#/definitions/Contents' + $ref: "#/definitions/Contents" 400: description: No data provided schema: @@ -221,7 +219,7 @@ paths: type: string format: url schema: - $ref: '#/definitions/Contents' + $ref: "#/definitions/Contents" 201: description: Path created headers: @@ -230,7 +228,7 @@ paths: type: string format: url schema: - $ref: '#/definitions/Contents' + $ref: "#/definitions/Contents" 400: description: No data provided schema: @@ -256,7 +254,7 @@ paths: format: url /api/contents/{path}/checkpoints: parameters: - - $ref: '#/parameters/path' + - $ref: "#/parameters/path" get: summary: Get a list of checkpoints for a file description: List checkpoints for a given file. There will typically be zero or one results. 
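
For orientation, the checkpoint endpoints touched above only change quote style, not behavior; they support a simple create-then-list round trip. A minimal client-side sketch of that round trip follows, where the base URL, token, and notebook name are placeholder assumptions, not part of this patch:

    import requests

    BASE = "http://localhost:8888"                     # placeholder server URL
    HEADERS = {"Authorization": "token <your-token>"}  # placeholder API token
    path = "Untitled.ipynb"                            # placeholder notebook path

    # POST creates a checkpoint for the file; GET lists existing checkpoints
    # (per the description above, typically zero or one results).
    created = requests.post(f"{BASE}/api/contents/{path}/checkpoints", headers=HEADERS).json()
    listed = requests.get(f"{BASE}/api/contents/{path}/checkpoints", headers=HEADERS).json()
    assert any(cp["id"] == created["id"] for cp in listed)
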
@@ -281,7 +279,7 @@ paths: schema: type: array items: - $ref: '#/definitions/Checkpoints' + $ref: "#/definitions/Checkpoints" 500: description: Model key error post: @@ -298,7 +296,7 @@ paths: type: string format: url schema: - $ref: '#/definitions/Checkpoints' + $ref: "#/definitions/Checkpoints" 404: description: No item found 400: @@ -346,7 +344,7 @@ paths: description: Checkpoint deleted /api/sessions/{session}: parameters: - - $ref: '#/parameters/session' + - $ref: "#/parameters/session" get: summary: Get session tags: @@ -355,7 +353,7 @@ paths: 200: description: Session schema: - $ref: '#/definitions/Session' + $ref: "#/definitions/Session" patch: summary: "This can be used to rename the session." tags: @@ -365,12 +363,12 @@ paths: in: body required: true schema: - $ref: '#/definitions/Session' + $ref: "#/definitions/Session" responses: 200: description: Session schema: - $ref: '#/definitions/Session' + $ref: "#/definitions/Session" 400: description: No data provided delete: @@ -393,7 +391,7 @@ paths: schema: type: array items: - $ref: '#/definitions/Session' + $ref: "#/definitions/Session" post: summary: "Create a new session, or return an existing session if a session of the same name already exists" tags: @@ -402,12 +400,12 @@ paths: - name: session in: body schema: - $ref: '#/definitions/Session' + $ref: "#/definitions/Session" responses: 201: description: Session created or returned schema: - $ref: '#/definitions/Session' + $ref: "#/definitions/Session" headers: Location: description: URL for session commands @@ -435,7 +433,7 @@ paths: schema: type: array items: - $ref: '#/definitions/Kernel' + $ref: "#/definitions/Kernel" post: summary: Start a kernel and return the uuid tags: @@ -458,7 +456,7 @@ paths: 201: description: Kernel started schema: - $ref: '#/definitions/Kernel' + $ref: "#/definitions/Kernel" headers: Location: description: Model for started kernel @@ -466,7 +464,7 @@ paths: format: url /api/kernels/{kernel_id}: parameters: - - $ref: '#/parameters/kernel' + - $ref: "#/parameters/kernel" get: summary: Get kernel information tags: @@ -475,7 +473,7 @@ paths: 200: description: Kernel information schema: - $ref: '#/definitions/Kernel' + $ref: "#/definitions/Kernel" delete: summary: Kill a kernel and delete the kernel id tags: @@ -485,7 +483,7 @@ paths: description: Kernel deleted /api/kernels/{kernel_id}/interrupt: parameters: - - $ref: '#/parameters/kernel' + - $ref: "#/parameters/kernel" post: summary: Interrupt a kernel tags: @@ -495,7 +493,7 @@ paths: description: Kernel interrupted /api/kernels/{kernel_id}/restart: parameters: - - $ref: '#/parameters/kernel' + - $ref: "#/parameters/kernel" post: summary: Restart a kernel tags: @@ -509,7 +507,7 @@ paths: type: string format: url schema: - $ref: '#/definitions/Kernel' + $ref: "#/definitions/Kernel" /api/kernelspecs: get: @@ -528,7 +526,7 @@ paths: kernelspecs: type: object additionalProperties: - $ref: '#/definitions/KernelSpec' + $ref: "#/definitions/KernelSpec" /api/config/{section_name}: get: summary: Get a configuration section by name @@ -568,7 +566,7 @@ paths: schema: type: array items: - $ref: '#/definitions/Terminal' + $ref: "#/definitions/Terminal" 403: description: Forbidden to access 404: @@ -582,7 +580,7 @@ paths: 200: description: Succesfully created a new terminal schema: - $ref: '#/definitions/Terminal' + $ref: "#/definitions/Terminal" 403: description: Forbidden to access 404: @@ -594,12 +592,12 @@ paths: tags: - terminals parameters: - - $ref: '#/parameters/terminal_id' + - $ref: 
"#/parameters/terminal_id" responses: 200: description: Terminal session with given id schema: - $ref: '#/definitions/Terminal' + $ref: "#/definitions/Terminal" 403: description: Forbidden to access 404: @@ -610,7 +608,7 @@ paths: tags: - terminals parameters: - - $ref: '#/parameters/terminal_id' + - $ref: "#/parameters/terminal_id" responses: 204: description: Succesfully deleted terminal session @@ -619,9 +617,6 @@ paths: 404: description: Not found - - - /api/status: get: summary: Get the current status/activity of the server. @@ -631,7 +626,7 @@ paths: 200: description: The current status of the server schema: - $ref: '#/definitions/APIStatus' + $ref: "#/definitions/APIStatus" /api/spec.yaml: get: @@ -644,7 +639,7 @@ paths: 200: description: The current spec for the notebook server's APIs. schema: - type: file + type: file definitions: APIStatus: description: | @@ -675,7 +670,7 @@ definitions: type: string description: Unique name for kernel KernelSpecFile: - $ref: '#/definitions/KernelSpecFile' + $ref: "#/definitions/KernelSpecFile" resources: type: object properties: @@ -723,8 +718,8 @@ definitions: items: type: object required: - - text - - url + - text + - url properties: text: type: string @@ -780,7 +775,7 @@ definitions: type: string description: session type kernel: - $ref: '#/definitions/Kernel' + $ref: "#/definitions/Kernel" Contents: description: "A contents object. The content and format keys may be null if content is not contained. If type is 'file', then the mimetype will be null." type: object diff --git a/jupyter_server/services/api/handlers.py b/jupyter_server/services/api/handlers.py index 571c643d51..dd47bfd4c7 100644 --- a/jupyter_server/services/api/handlers.py +++ b/jupyter_server/services/api/handlers.py @@ -1,30 +1,29 @@ """Tornado handlers for api specifications.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- import json import os from tornado import web -from ...base.handlers import JupyterHandler, APIHandler +from ...base.handlers import APIHandler +from ...base.handlers import JupyterHandler +from jupyter_server._tz import isoformat +from jupyter_server._tz import utcfromtimestamp from jupyter_server.utils import ensure_async -from jupyter_server._tz import utcfromtimestamp, isoformat class APISpecHandler(web.StaticFileHandler, JupyterHandler): - def initialize(self): web.StaticFileHandler.initialize(self, path=os.path.dirname(__file__)) @web.authenticated def get(self): self.log.warning("Serving api spec (experimental, incomplete)") - return web.StaticFileHandler.get(self, 'api.yaml') + return web.StaticFileHandler.get(self, "api.yaml") def get_content_type(self): - return 'text/x-yaml' + return "text/x-yaml" class APIStatusHandler(APIHandler): @@ -34,17 +33,17 @@ class APIStatusHandler(APIHandler): @web.authenticated async def get(self): # if started was missing, use unix epoch - started = self.settings.get('started', utcfromtimestamp(0)) + started = self.settings.get("started", utcfromtimestamp(0)) started = isoformat(started) kernels = await ensure_async(self.kernel_manager.list_kernels()) - total_connections = sum(k['connections'] for k in kernels) + total_connections = sum(k["connections"] for k in kernels) last_activity = isoformat(self.application.last_activity()) model = { - 'started': started, - 'last_activity': last_activity, - 'kernels': len(kernels), - 'connections': total_connections, + "started": started, + "last_activity": last_activity, + "kernels": len(kernels), + "connections": total_connections, } self.finish(json.dumps(model, sort_keys=True)) diff --git a/jupyter_server/services/config/handlers.py b/jupyter_server/services/config/handlers.py index 245b344bf3..783cf49321 100644 --- a/jupyter_server/services/config/handlers.py +++ b/jupyter_server/services/config/handlers.py @@ -1,17 +1,17 @@ """Tornado handlers for frontend config storage.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import json + from tornado import web from ...base.handlers import APIHandler -class ConfigHandler(APIHandler): +class ConfigHandler(APIHandler): @web.authenticated def get(self, section_name): - self.set_header("Content-Type", 'application/json') + self.set_header("Content-Type", "application/json") self.finish(json.dumps(self.config_manager.get(section_name))) @web.authenticated diff --git a/jupyter_server/services/config/manager.py b/jupyter_server/services/config/manager.py index 1744d4ef4e..b252cf5264 100644 --- a/jupyter_server/services/config/manager.py +++ b/jupyter_server/services/config/manager.py @@ -2,22 +2,27 @@ """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- import os.path -from jupyter_server.config_manager import BaseJSONConfigManager, recursive_update -from jupyter_core.paths import jupyter_config_dir, jupyter_config_path -from traitlets import Unicode, Instance, List, observe, default +from jupyter_core.paths import jupyter_config_dir +from jupyter_core.paths import jupyter_config_path +from traitlets import default +from traitlets import Instance +from traitlets import List +from traitlets import observe +from traitlets import Unicode from traitlets.config import LoggingConfigurable +from jupyter_server.config_manager import BaseJSONConfigManager +from jupyter_server.config_manager import recursive_update + class ConfigManager(LoggingConfigurable): """Config Manager used for storing frontend config""" - config_dir_name = Unicode( - "serverconfig", - help="""Name of the config directory.""" - ).tag(config=True) + config_dir_name = Unicode("serverconfig", help="""Name of the config directory.""").tag( + config=True + ) # Public API @@ -42,22 +47,22 @@ def update(self, section_name, new_data): read_config_path = List(Unicode()) - @default('read_config_path') + @default("read_config_path") def _default_read_config_path(self): return [os.path.join(p, self.config_dir_name) for p in jupyter_config_path()] write_config_dir = Unicode() - @default('write_config_dir') + @default("write_config_dir") def _default_write_config_dir(self): return os.path.join(jupyter_config_dir(), self.config_dir_name) write_config_manager = Instance(BaseJSONConfigManager) - @default('write_config_manager') + @default("write_config_manager") def _default_write_config_manager(self): return BaseJSONConfigManager(config_dir=self.write_config_dir) - @observe('write_config_dir') + @observe("write_config_dir") def _update_write_config_dir(self, change): self.write_config_manager = BaseJSONConfigManager(config_dir=self.write_config_dir) diff --git a/jupyter_server/services/contents/checkpoints.py b/jupyter_server/services/contents/checkpoints.py index 7ac5bce4ba..a3a3356b4e 100644 --- a/jupyter_server/services/contents/checkpoints.py +++ b/jupyter_server/services/contents/checkpoints.py @@ -1,12 +1,9 @@ """ Classes for managing Checkpoints. """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- from tornado.web import HTTPError - from traitlets.config.configurable import LoggingConfigurable @@ -22,6 +19,7 @@ class Checkpoints(LoggingConfigurable): delete_checkpoint(self, checkpoint_id, path) list_checkpoints(self, path) """ + def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" raise NotImplementedError("must be implemented in a subclass") @@ -45,12 +43,12 @@ def list_checkpoints(self, path): def rename_all_checkpoints(self, old_path, new_path): """Rename all checkpoints for old_path to new_path.""" for cp in self.list_checkpoints(old_path): - self.rename_checkpoint(cp['id'], old_path, new_path) + self.rename_checkpoint(cp["id"], old_path, new_path) def delete_all_checkpoints(self, path): """Delete all checkpoints for the given path.""" for checkpoint in self.list_checkpoints(path): - self.delete_checkpoint(checkpoint['id'], path) + self.delete_checkpoint(checkpoint["id"], path) class GenericCheckpointsMixin(object): @@ -77,30 +75,30 @@ class GenericCheckpointsMixin(object): def create_checkpoint(self, contents_mgr, path): model = contents_mgr.get(path, content=True) - type = model['type'] - if type == 'notebook': + type = model["type"] + if type == "notebook": return self.create_notebook_checkpoint( - model['content'], + model["content"], path, ) - elif type == 'file': + elif type == "file": return self.create_file_checkpoint( - model['content'], - model['format'], + model["content"], + model["format"], path, ) else: - raise HTTPError(500, u'Unexpected type %s' % type) + raise HTTPError(500, u"Unexpected type %s" % type) def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" - type = contents_mgr.get(path, content=False)['type'] - if type == 'notebook': + type = contents_mgr.get(path, content=False)["type"] + if type == "notebook": model = self.get_notebook_checkpoint(checkpoint_id, path) - elif type == 'file': + elif type == "file": model = self.get_file_checkpoint(checkpoint_id, path) else: - raise HTTPError(500, u'Unexpected type %s' % type) + raise HTTPError(500, u"Unexpected type %s" % type) contents_mgr.save(model, path) # Required Methods @@ -146,6 +144,7 @@ class AsyncCheckpoints(Checkpoints): """ Base class for managing checkpoints for a ContentsManager asynchronously. 
""" + async def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" raise NotImplementedError("must be implemented in a subclass") @@ -168,13 +167,13 @@ async def list_checkpoints(self, path): async def rename_all_checkpoints(self, old_path, new_path): """Rename all checkpoints for old_path to new_path.""" - for cp in (await self.list_checkpoints(old_path)): - await self.rename_checkpoint(cp['id'], old_path, new_path) + for cp in await self.list_checkpoints(old_path): + await self.rename_checkpoint(cp["id"], old_path, new_path) async def delete_all_checkpoints(self, path): """Delete all checkpoints for the given path.""" - for checkpoint in (await self.list_checkpoints(path)): - await self.delete_checkpoint(checkpoint['id'], path) + for checkpoint in await self.list_checkpoints(path): + await self.delete_checkpoint(checkpoint["id"], path) class AsyncGenericCheckpointsMixin(GenericCheckpointsMixin): @@ -185,30 +184,30 @@ class AsyncGenericCheckpointsMixin(GenericCheckpointsMixin): async def create_checkpoint(self, contents_mgr, path): model = await contents_mgr.get(path, content=True) - type = model['type'] - if type == 'notebook': + type = model["type"] + if type == "notebook": return await self.create_notebook_checkpoint( - model['content'], + model["content"], path, ) - elif type == 'file': + elif type == "file": return await self.create_file_checkpoint( - model['content'], - model['format'], + model["content"], + model["format"], path, ) else: - raise HTTPError(500, u'Unexpected type %s' % type) + raise HTTPError(500, u"Unexpected type %s" % type) async def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" - type = await contents_mgr.get(path, content=False)['type'] - if type == 'notebook': + type = await contents_mgr.get(path, content=False)["type"] + if type == "notebook": model = await self.get_notebook_checkpoint(checkpoint_id, path) - elif type == 'file': + elif type == "file": model = await self.get_file_checkpoint(checkpoint_id, path) else: - raise HTTPError(500, u'Unexpected type %s' % type) + raise HTTPError(500, u"Unexpected type %s" % type) await contents_mgr.save(model, path) # Required Methods diff --git a/jupyter_server/services/contents/filecheckpoints.py b/jupyter_server/services/contents/filecheckpoints.py index 5c54f0f360..0deb3998f3 100644 --- a/jupyter_server/services/contents/filecheckpoints.py +++ b/jupyter_server/services/contents/filecheckpoints.py @@ -4,20 +4,17 @@ import os import shutil -from tornado.web import HTTPError - -from .checkpoints import ( - AsyncCheckpoints, - Checkpoints, - AsyncGenericCheckpointsMixin, - GenericCheckpointsMixin, -) -from .fileio import AsyncFileManagerMixin, FileManagerMixin - from anyio.to_thread import run_sync from jupyter_core.utils import ensure_dir_exists +from tornado.web import HTTPError from traitlets import Unicode +from .checkpoints import AsyncCheckpoints +from .checkpoints import AsyncGenericCheckpointsMixin +from .checkpoints import Checkpoints +from .checkpoints import GenericCheckpointsMixin +from .fileio import AsyncFileManagerMixin +from .fileio import FileManagerMixin from jupyter_server import _tz as tz @@ -31,7 +28,7 @@ class FileCheckpoints(FileManagerMixin, Checkpoints): """ checkpoint_dir = Unicode( - '.ipynb_checkpoints', + ".ipynb_checkpoints", config=True, help="""The directory name in which to keep file checkpoints @@ -52,7 +49,7 @@ def _root_dir_default(self): # ContentsManager-dependent checkpoint API def create_checkpoint(self, contents_mgr, 
path): """Create a checkpoint.""" - checkpoint_id = u'checkpoint' + checkpoint_id = u"checkpoint" src_path = contents_mgr._get_os_path(path) dest_path = self.checkpoint_path(checkpoint_id, path) self._copy(src_path, dest_path) @@ -80,7 +77,7 @@ def rename_checkpoint(self, checkpoint_id, old_path, new_path): def delete_checkpoint(self, checkpoint_id, path): """delete a file's checkpoint""" - path = path.strip('/') + path = path.strip("/") cp_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(cp_path): self.no_such_checkpoint(path, checkpoint_id) @@ -94,7 +91,7 @@ def list_checkpoints(self, path): This contents manager currently only supports one checkpoint per file. """ - path = path.strip('/') + path = path.strip("/") checkpoint_id = "checkpoint" os_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_path): @@ -105,9 +102,9 @@ def list_checkpoints(self, path): # Checkpoint-related utilities def checkpoint_path(self, checkpoint_id, path): """find the path to a checkpoint""" - path = path.strip('/') - parent, name = ('/' + path).rsplit('/', 1) - parent = parent.strip('/') + path = path.strip("/") + parent, name = ("/" + path).rsplit("/", 1) + parent = parent.strip("/") basename, ext = os.path.splitext(name) filename = u"{name}-{checkpoint_id}{ext}".format( name=basename, @@ -133,20 +130,17 @@ def checkpoint_model(self, checkpoint_id, os_path): # Error Handling def no_such_checkpoint(self, path, checkpoint_id): - raise HTTPError( - 404, - u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id) - ) + raise HTTPError(404, u"Checkpoint does not exist: %s@%s" % (path, checkpoint_id)) class AsyncFileCheckpoints(FileCheckpoints, AsyncFileManagerMixin, AsyncCheckpoints): async def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" - checkpoint_id = u'checkpoint' + checkpoint_id = u"checkpoint" src_path = contents_mgr._get_os_path(path) dest_path = self.checkpoint_path(checkpoint_id, path) await self._copy(src_path, dest_path) - return (await self.checkpoint_model(checkpoint_id, dest_path)) + return await self.checkpoint_model(checkpoint_id, dest_path) async def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" @@ -180,7 +174,7 @@ async def rename_checkpoint(self, checkpoint_id, old_path, new_path): async def delete_checkpoint(self, checkpoint_id, path): """delete a file's checkpoint""" - path = path.strip('/') + path = path.strip("/") cp_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(cp_path): self.no_such_checkpoint(path, checkpoint_id) @@ -194,7 +188,7 @@ async def list_checkpoints(self, path): This contents manager currently only supports one checkpoint per file. """ - path = path.strip('/') + path = path.strip("/") checkpoint_id = "checkpoint" os_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_path): @@ -208,9 +202,10 @@ class GenericFileCheckpoints(GenericCheckpointsMixin, FileCheckpoints): Local filesystem Checkpoints that works with any conforming ContentsManager. 
""" + def create_file_checkpoint(self, content, format, path): """Create a checkpoint from the current content of a file.""" - path = path.strip('/') + path = path.strip("/") # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -223,7 +218,7 @@ def create_file_checkpoint(self, content, format, path): def create_notebook_checkpoint(self, nb, path): """Create a checkpoint from the current content of a notebook.""" - path = path.strip('/') + path = path.strip("/") # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -236,7 +231,7 @@ def create_notebook_checkpoint(self, nb, path): def get_notebook_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a notebook.""" - path = path.strip('/') + path = path.strip("/") self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -244,8 +239,8 @@ def get_notebook_checkpoint(self, checkpoint_id, path): self.no_such_checkpoint(path, checkpoint_id) return { - 'type': 'notebook', - 'content': self._read_notebook( + "type": "notebook", + "content": self._read_notebook( os_checkpoint_path, as_version=4, ), @@ -253,7 +248,7 @@ def get_notebook_checkpoint(self, checkpoint_id, path): def get_file_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a file.""" - path = path.strip('/') + path = path.strip("/") self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -262,9 +257,9 @@ def get_file_checkpoint(self, checkpoint_id, path): content, format = self._read_file(os_checkpoint_path, format=None) return { - 'type': 'file', - 'content': content, - 'format': format, + "type": "file", + "content": content, + "format": format, } @@ -273,9 +268,10 @@ class AsyncGenericFileCheckpoints(AsyncGenericCheckpointsMixin, AsyncFileCheckpo Asynchronous Local filesystem Checkpoints that works with any conforming ContentsManager. 
""" + async def create_file_checkpoint(self, content, format, path): """Create a checkpoint from the current content of a file.""" - path = path.strip('/') + path = path.strip("/") # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -288,7 +284,7 @@ async def create_file_checkpoint(self, content, format, path): async def create_notebook_checkpoint(self, nb, path): """Create a checkpoint from the current content of a notebook.""" - path = path.strip('/') + path = path.strip("/") # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -301,7 +297,7 @@ async def create_notebook_checkpoint(self, nb, path): async def get_notebook_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a notebook.""" - path = path.strip('/') + path = path.strip("/") self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -309,8 +305,8 @@ async def get_notebook_checkpoint(self, checkpoint_id, path): self.no_such_checkpoint(path, checkpoint_id) return { - 'type': 'notebook', - 'content': await self._read_notebook( + "type": "notebook", + "content": await self._read_notebook( os_checkpoint_path, as_version=4, ), @@ -318,7 +314,7 @@ async def get_notebook_checkpoint(self, checkpoint_id, path): async def get_file_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a file.""" - path = path.strip('/') + path = path.strip("/") self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -327,7 +323,7 @@ async def get_file_checkpoint(self, checkpoint_id, path): content, format = await self._read_file(os_checkpoint_path, format=None) return { - 'type': 'file', - 'content': content, - 'format': format, + "type": "file", + "content": content, + "format": format, } diff --git a/jupyter_server/services/contents/fileio.py b/jupyter_server/services/contents/fileio.py index 559139fdf0..5025c4570a 100644 --- a/jupyter_server/services/contents/fileio.py +++ b/jupyter_server/services/contents/fileio.py @@ -1,43 +1,37 @@ """ Utilities for file-based Contents/Checkpoints managers. """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- -from contextlib import contextmanager import errno -from functools import partial import io import os import shutil +from base64 import decodebytes +from base64 import encodebytes +from contextlib import contextmanager +from functools import partial +import nbformat from anyio.to_thread import run_sync from tornado.web import HTTPError - -from jupyter_server.utils import ( - to_api_path, - to_os_path, -) -import nbformat - - -from traitlets.config import Configurable from traitlets import Bool +from traitlets.config import Configurable -from base64 import encodebytes, decodebytes +from jupyter_server.utils import to_api_path +from jupyter_server.utils import to_os_path def replace_file(src, dst): - """ replace dst with src - """ + """replace dst with src""" os.replace(src, dst) + async def async_replace_file(src, dst): - """ replace dst with src asynchronously - """ + """replace dst with src asynchronously""" await run_sync(os.replace, src, dst) + def copy2_safe(src, dst, log=None): """copy src to dst @@ -50,6 +44,7 @@ def copy2_safe(src, dst, log=None): if log: log.debug("copystat on %s failed", dst, exc_info=True) + async def async_copy2_safe(src, dst, log=None): """copy src to dst asynchronously @@ -62,20 +57,23 @@ async def async_copy2_safe(src, dst, log=None): if log: log.debug("copystat on %s failed", dst, exc_info=True) + def path_to_intermediate(path): - '''Name of the intermediate file used in atomic writes. + """Name of the intermediate file used in atomic writes. - The .~ prefix will make Dropbox ignore the temporary file.''' + The .~ prefix will make Dropbox ignore the temporary file.""" dirname, basename = os.path.split(path) - return os.path.join(dirname, '.~'+basename) + return os.path.join(dirname, ".~" + basename) + def path_to_invalid(path): - '''Name of invalid file after a failed atomic write and subsequent read.''' + """Name of invalid file after a failed atomic write and subsequent read.""" dirname, basename = os.path.split(path) - return os.path.join(dirname, basename+'.invalid') + return os.path.join(dirname, basename + ".invalid") + @contextmanager -def atomic_writing(path, text=True, encoding='utf-8', log=None, **kwargs): +def atomic_writing(path, text=True, encoding="utf-8", log=None, **kwargs): """Context manager to write to a file only if the entire write is successful. This works by copying the previous file contents to a temporary file in the @@ -108,10 +106,10 @@ def atomic_writing(path, text=True, encoding='utf-8', log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default - kwargs.setdefault('newline', '\n') - fileobj = io.open(path, 'w', encoding=encoding, **kwargs) + kwargs.setdefault("newline", "\n") + fileobj = io.open(path, "w", encoding=encoding, **kwargs) else: - fileobj = io.open(path, 'wb', **kwargs) + fileobj = io.open(path, "wb", **kwargs) try: yield fileobj @@ -132,7 +130,7 @@ def atomic_writing(path, text=True, encoding='utf-8', log=None, **kwargs): @contextmanager -def _simple_writing(path, text=True, encoding='utf-8', log=None, **kwargs): +def _simple_writing(path, text=True, encoding="utf-8", log=None, **kwargs): """Context manager to write file without doing atomic writing (for weird filesystem eg: nfs). 
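
[Editor's note] The helpers above (replace_file, path_to_intermediate, atomic_writing) implement a copy/write/restore protocol: the old content is first copied to a ".~"-prefixed intermediate file, the new content is written over the original path, and on failure the intermediate is moved back (on success it is deleted). A rough standalone sketch of the same idea, assuming simplified error handling:

    import io
    import os
    import shutil
    from contextlib import contextmanager

    @contextmanager
    def atomic_writing_sketch(path, encoding="utf-8"):
        # Keep a restorable copy next to the file; the ".~" prefix matches
        # path_to_intermediate() above and is ignored by Dropbox.
        tmp_path = os.path.join(os.path.dirname(path), ".~" + os.path.basename(path))
        if os.path.isfile(path):
            shutil.copy2(path, tmp_path)
        fileobj = io.open(path, "w", encoding=encoding, newline="\n")
        try:
            yield fileobj
        except BaseException:
            # The write failed: restore the original content, then re-raise.
            fileobj.close()
            if os.path.isfile(tmp_path):
                os.replace(tmp_path, path)
            raise
        fileobj.close()
        if os.path.isfile(tmp_path):
            os.remove(tmp_path)
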
@@ -156,10 +154,10 @@ def _simple_writing(path, text=True, encoding='utf-8', log=None, **kwargs):
 
     if text:
         # Make sure that text files have Unix linefeeds by default
-        kwargs.setdefault('newline', '\n')
-        fileobj = io.open(path, 'w', encoding=encoding, **kwargs)
+        kwargs.setdefault("newline", "\n")
+        fileobj = io.open(path, "w", encoding=encoding, **kwargs)
     else:
-        fileobj = io.open(path, 'wb', **kwargs)
+        fileobj = io.open(path, "wb", **kwargs)
 
     try:
         yield fileobj
@@ -188,10 +186,13 @@ class FileManagerMixin(Configurable):
     log : logging.Logger
     """
 
-    use_atomic_writing = Bool(True, config=True, help=
-    """By default notebooks are saved on disk on a temporary file and then if successfully written, it replaces the old ones.
+    use_atomic_writing = Bool(
+        True,
+        config=True,
+        help="""By default notebooks are saved on disk on a temporary file and then if successfully written, it replaces the old ones.
       This procedure, namely 'atomic_writing', causes some bugs on file systems without operation order enforcement (like some networked fs).
-      If set to False, the new notebook is written directly on the old one which could fail (e.g. full filesystem or quota)""")
+      If set to False, the new notebook is written directly on the old one which could fail (e.g. full filesystem or quota)""",
+    )
 
     @contextmanager
     def open(self, os_path, *args, **kwargs):
@@ -214,7 +215,7 @@ def atomic_writing(self, os_path, *args, **kwargs):
                 yield f
 
     @contextmanager
-    def perm_to_403(self, os_path=''):
+    def perm_to_403(self, os_path=""):
         """context manager for turning permission errors into 403."""
         try:
             yield
@@ -226,7 +227,7 @@ def perm_to_403(self, os_path=''):
                 if not os_path:
                     os_path = e.filename or "unknown file"
                 path = to_api_path(os_path, root=self.root_dir)
-                raise HTTPError(403, u'Permission denied: %s' % path) from e
+                raise HTTPError(403, u"Permission denied: %s" % path) from e
             else:
                 raise
 
@@ -262,7 +263,7 @@ def _get_os_path(self, path):
 
     def _read_notebook(self, os_path, as_version=4):
         """Read a notebook from an os path."""
-        with self.open(os_path, 'r', encoding='utf-8') as f:
+        with self.open(os_path, "r", encoding="utf-8") as f:
             try:
                 return nbformat.read(f, as_version=as_version)
             except Exception as e:
@@ -287,7 +288,7 @@ def _read_notebook(self, os_path, as_version=4):
 
     def _save_notebook(self, os_path, nb):
         """Save a notebook to an os_path."""
-        with self.atomic_writing(os_path, encoding='utf-8') as f:
+        with self.atomic_writing(os_path, encoding="utf-8") as f:
             nbformat.write(nb, f, version=nbformat.NO_CONVERT)
 
     def _read_file(self, os_path, format):
@@ -302,48 +303,48 @@ def _read_file(self, os_path, format):
         if not os.path.isfile(os_path):
            raise HTTPError(400, "Cannot read non-file %s" % os_path)
 
-        with self.open(os_path, 'rb') as f:
+        with self.open(os_path, "rb") as f:
             bcontent = f.read()
 
-        if format is None or format == 'text':
+        if format is None or format == "text":
             # Try to interpret as unicode if format is unknown or if unicode
             # was explicitly requested.
try: - return bcontent.decode('utf8'), 'text' + return bcontent.decode("utf8"), "text" except UnicodeError as e: - if format == 'text': + if format == "text": raise HTTPError( 400, "%s is not UTF-8 encoded" % os_path, - reason='bad format', + reason="bad format", ) from e - return encodebytes(bcontent).decode('ascii'), 'base64' + return encodebytes(bcontent).decode("ascii"), "base64" def _save_file(self, os_path, content, format): """Save content of a generic file.""" - if format not in {'text', 'base64'}: + if format not in {"text", "base64"}: raise HTTPError( 400, "Must specify format of file contents as 'text' or 'base64'", ) try: - if format == 'text': - bcontent = content.encode('utf8') + if format == "text": + bcontent = content.encode("utf8") else: - b64_bytes = content.encode('ascii') + b64_bytes = content.encode("ascii") bcontent = decodebytes(b64_bytes) except Exception as e: - raise HTTPError( - 400, u'Encoding error saving %s: %s' % (os_path, e) - ) from e + raise HTTPError(400, u"Encoding error saving %s: %s" % (os_path, e)) from e with self.atomic_writing(os_path, text=False) as f: f.write(bcontent) + class AsyncFileManagerMixin(FileManagerMixin): """ Mixin for ContentsAPI classes that interact with the filesystem asynchronously. """ + async def _copy(self, src, dest): """copy src to dest @@ -353,7 +354,7 @@ async def _copy(self, src, dest): async def _read_notebook(self, os_path, as_version=4): """Read a notebook from an os path.""" - with self.open(os_path, 'r', encoding='utf-8') as f: + with self.open(os_path, "r", encoding="utf-8") as f: try: return await run_sync(partial(nbformat.read, as_version=as_version), f) except Exception as e: @@ -378,7 +379,7 @@ async def _read_notebook(self, os_path, as_version=4): async def _save_notebook(self, os_path, nb): """Save a notebook to an os_path.""" - with self.atomic_writing(os_path, encoding='utf-8') as f: + with self.atomic_writing(os_path, encoding="utf-8") as f: await run_sync(partial(nbformat.write, version=nbformat.NO_CONVERT), nb, f) async def _read_file(self, os_path, format): @@ -393,40 +394,38 @@ async def _read_file(self, os_path, format): if not os.path.isfile(os_path): raise HTTPError(400, "Cannot read non-file %s" % os_path) - with self.open(os_path, 'rb') as f: + with self.open(os_path, "rb") as f: bcontent = await run_sync(f.read) - if format is None or format == 'text': + if format is None or format == "text": # Try to interpret as unicode if format is unknown or if unicode # was explicitly requested. 
try: - return bcontent.decode('utf8'), 'text' + return bcontent.decode("utf8"), "text" except UnicodeError as e: - if format == 'text': + if format == "text": raise HTTPError( 400, "%s is not UTF-8 encoded" % os_path, - reason='bad format', + reason="bad format", ) from e - return encodebytes(bcontent).decode('ascii'), 'base64' + return encodebytes(bcontent).decode("ascii"), "base64" async def _save_file(self, os_path, content, format): """Save content of a generic file.""" - if format not in {'text', 'base64'}: + if format not in {"text", "base64"}: raise HTTPError( 400, "Must specify format of file contents as 'text' or 'base64'", ) try: - if format == 'text': - bcontent = content.encode('utf8') + if format == "text": + bcontent = content.encode("utf8") else: - b64_bytes = content.encode('ascii') + b64_bytes = content.encode("ascii") bcontent = decodebytes(b64_bytes) except Exception as e: - raise HTTPError( - 400, u'Encoding error saving %s: %s' % (os_path, e) - ) from e + raise HTTPError(400, u"Encoding error saving %s: %s" % (os_path, e)) from e with self.atomic_writing(os_path, text=False) as f: await run_sync(f.write, bcontent) diff --git a/jupyter_server/services/contents/filemanager.py b/jupyter_server/services/contents/filemanager.py index ee92155931..376a8db628 100644 --- a/jupyter_server/services/contents/filemanager.py +++ b/jupyter_server/services/contents/filemanager.py @@ -1,29 +1,35 @@ """A contents manager that uses the local file system for storage.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - -from datetime import datetime import errno +import mimetypes import os import shutil import stat import sys -import mimetypes -import nbformat +from datetime import datetime +import nbformat from anyio.to_thread import run_sync +from ipython_genutils.importstring import import_item +from jupyter_core.paths import exists +from jupyter_core.paths import is_file_hidden +from jupyter_core.paths import is_hidden from send2trash import send2trash from tornado import web - -from .filecheckpoints import AsyncFileCheckpoints, FileCheckpoints -from .fileio import AsyncFileManagerMixin, FileManagerMixin -from .manager import AsyncContentsManager, ContentsManager - -from ipython_genutils.importstring import import_item -from traitlets import Any, Unicode, Bool, TraitError, default, validate - -from jupyter_core.paths import exists, is_hidden, is_file_hidden +from traitlets import Any +from traitlets import Bool +from traitlets import default +from traitlets import TraitError +from traitlets import Unicode +from traitlets import validate + +from .filecheckpoints import AsyncFileCheckpoints +from .filecheckpoints import FileCheckpoints +from .fileio import AsyncFileManagerMixin +from .fileio import FileManagerMixin +from .manager import AsyncContentsManager +from .manager import ContentsManager from jupyter_server import _tz as tz from jupyter_server.base.handlers import AuthenticatedFileHandler from jupyter_server.transutils import _i18n @@ -41,14 +47,17 @@ class FileContentsManager(FileManagerMixin, ContentsManager): root_dir = Unicode(config=True) - @default('root_dir') + @default("root_dir") def _default_root_dir(self): try: return self.parent.root_dir except AttributeError: return os.getcwd() - post_save_hook = Any(None, config=True, allow_none=True, + post_save_hook = Any( + None, + config=True, + allow_none=True, help="""Python callable or importstring thereof to be called on the path of a file just saved. 
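
[Editor's note] The post_save_hook trait introduced here accepts either a callable or an import string; judging from run_post_save_hook in the next hunk, the hook is invoked with os_path, model, and contents_manager keyword arguments. A hedged configuration sketch (the hook body and file name are illustrative):

    # jupyter_server_config.py
    def log_saves(os_path, model, contents_manager):
        # Runs after every successful save; `model` carries no content here.
        contents_manager.log.info("Saved %s (%s)", os_path, model["type"])

    c.FileContentsManager.post_save_hook = log_saves
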
@@ -63,12 +72,12 @@ def _default_root_dir(self):
         - path: the filesystem path to the file just written
         - model: the model representing the file
         - contents_manager: this ContentsManager instance
-        """
+        """,
     )
 
-    @validate('post_save_hook')
+    @validate("post_save_hook")
     def _validate_post_save_hook(self, proposal):
-        value = proposal['value']
+        value = proposal["value"]
         if isinstance(value, str):
             value = import_item(value)
         if not callable(value):
@@ -83,13 +92,14 @@ def run_post_save_hook(self, model, os_path):
                 self.post_save_hook(os_path=os_path, model=model, contents_manager=self)
             except Exception as e:
                 self.log.error("Post-save hook failed on %s", os_path, exc_info=True)
-                raise web.HTTPError(500, u'Unexpected error while running post hook save: %s' %
-                                    e) from e
+                raise web.HTTPError(
+                    500, u"Unexpected error while running post hook save: %s" % e
+                ) from e
 
-    @validate('root_dir')
+    @validate("root_dir")
     def _validate_root_dir(self, proposal):
         """Do a bit of validation of the root_dir."""
-        value = proposal['value']
+        value = proposal["value"]
         if not os.path.isabs(value):
             # If we receive a non-absolute path, make it absolute.
             value = os.path.abspath(value)
@@ -97,22 +107,25 @@ def _validate_root_dir(self, proposal):
             raise TraitError("%r is not a directory" % value)
         return value
 
-    @default('checkpoints_class')
+    @default("checkpoints_class")
     def _checkpoints_class_default(self):
         return FileCheckpoints
 
-    delete_to_trash = Bool(True, config=True,
+    delete_to_trash = Bool(
+        True,
+        config=True,
         help="""If True (default), deleting files will send them to the
         platform's trash/recycle bin, where they can be recovered. If False,
-        deleting files really deletes them.""")
+        deleting files really deletes them.""",
+    )
 
-    @default('files_handler_class')
+    @default("files_handler_class")
     def _files_handler_class_default(self):
         return AuthenticatedFileHandler
 
-    @default('files_handler_params')
+    @default("files_handler_params")
     def _files_handler_params_default(self):
-        return {'path': self.root_dir}
+        return {"path": self.root_dir}
 
     def is_hidden(self, path):
         """Does the API style path correspond to a hidden directory or file?
@@ -128,7 +141,7 @@ def is_hidden(self, path):
         hidden : bool
             Whether the path exists and is hidden.
         """
-        path = path.strip('/')
+        path = path.strip("/")
         os_path = self._get_os_path(path=path)
         return is_hidden(os_path, self.root_dir)
 
@@ -147,7 +160,7 @@ def file_exists(self, path):
         exists : bool
             Whether the file exists.
         """
-        path = path.strip('/')
+        path = path.strip("/")
         os_path = self._get_os_path(path)
         return os.path.isfile(os_path)
 
@@ -167,7 +180,7 @@ def dir_exists(self, path):
         exists : bool
             Whether the path is indeed a directory.
         """
-        path = path.strip('/')
+        path = path.strip("/")
         os_path = self._get_os_path(path=path)
         return os.path.isdir(os_path)
 
@@ -186,7 +199,7 @@ def exists(self, path):
         exists : bool
             Whether the target exists.
         """
-        path = path.strip('/')
+        path = path.strip("/")
         os_path = self._get_os_path(path=path)
         return exists(os_path)
 
@@ -199,7 +212,7 @@ def _base_model(self, path):
             # size of file
             size = info.st_size
         except (ValueError, OSError):
-            self.log.warning('Unable to get size.')
+            self.log.warning("Unable to get size.")
             size = None
 
         try:
@@ -209,31 +222,31 @@ def _base_model(self, path):
             # https://github.com/jupyter/notebook/issues/2539
             # https://github.com/jupyter/notebook/issues/2757
             # Use the Unix epoch as a fallback so we don't crash.
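
[Editor's note] The remainder of this hunk (below) assembles the base contents model that _file_model, _notebook_model, and _dir_model then extend. For reference, a sketch of the dict shape with illustrative values:

    from datetime import datetime, timezone

    model = {
        "name": "demo.ipynb",      # path.rsplit("/", 1)[-1]
        "path": "notebooks/demo.ipynb",
        "last_modified": datetime(1970, 1, 1, tzinfo=timezone.utc),  # epoch fallback
        "created": datetime(1970, 1, 1, tzinfo=timezone.utc),
        "content": None,           # filled in later when content=True
        "format": None,
        "mimetype": None,
        "size": 4096,              # info.st_size, or None if stat failed
        "writable": True,          # os.access(os_path, os.W_OK)
    }
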
- self.log.warning('Invalid mtime %s for %s', info.st_mtime, os_path) + self.log.warning("Invalid mtime %s for %s", info.st_mtime, os_path) last_modified = datetime(1970, 1, 1, 0, 0, tzinfo=tz.UTC) try: created = tz.utcfromtimestamp(info.st_ctime) except (ValueError, OSError): # See above - self.log.warning('Invalid ctime %s for %s', info.st_ctime, os_path) + self.log.warning("Invalid ctime %s for %s", info.st_ctime, os_path) created = datetime(1970, 1, 1, 0, 0, tzinfo=tz.UTC) # Create the base model. model = {} - model['name'] = path.rsplit('/', 1)[-1] - model['path'] = path - model['last_modified'] = last_modified - model['created'] = created - model['content'] = None - model['format'] = None - model['mimetype'] = None - model['size'] = size + model["name"] = path.rsplit("/", 1)[-1] + model["path"] = path + model["last_modified"] = last_modified + model["created"] = created + model["content"] = None + model["format"] = None + model["mimetype"] = None + model["size"] = size try: - model['writable'] = os.access(os_path, os.W_OK) + model["writable"] = os.access(os_path, os.W_OK) except OSError: self.log.error("Failed to check write permissions on %s", os_path) - model['writable'] = False + model["writable"] = False return model def _dir_model(self, path, content=True): @@ -243,28 +256,25 @@ def _dir_model(self, path, content=True): """ os_path = self._get_os_path(path) - four_o_four = u'directory does not exist: %r' % path + four_o_four = u"directory does not exist: %r" % path if not os.path.isdir(os_path): raise web.HTTPError(404, four_o_four) elif is_hidden(os_path, self.root_dir) and not self.allow_hidden: - self.log.info("Refusing to serve hidden directory %r, via 404 Error", - os_path - ) + self.log.info("Refusing to serve hidden directory %r, via 404 Error", os_path) raise web.HTTPError(404, four_o_four) model = self._base_model(path) - model['type'] = 'directory' - model['size'] = None + model["type"] = "directory" + model["size"] = None if content: - model['content'] = contents = [] + model["content"] = contents = [] os_dir = self._get_os_path(path) for name in os.listdir(os_dir): try: os_path = os.path.join(os_dir, name) except UnicodeDecodeError as e: - self.log.warning( - "failed to decode filename '%s': %s", name, e) + self.log.warning("failed to decode filename '%s': %s", name, e) continue try: @@ -277,18 +287,18 @@ def _dir_model(self, path, content=True): self.log.warning("Error stat-ing %s: %s", os_path, e) continue - if (not stat.S_ISLNK(st.st_mode) - and not stat.S_ISREG(st.st_mode) - and not stat.S_ISDIR(st.st_mode)): + if ( + not stat.S_ISLNK(st.st_mode) + and not stat.S_ISREG(st.st_mode) + and not stat.S_ISDIR(st.st_mode) + ): self.log.debug("%s not a regular file", os_path) continue try: if self.should_list(name): if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): - contents.append( - self.get(path='%s/%s' % (path, name), content=False) - ) + contents.append(self.get(path="%s/%s" % (path, name), content=False)) except OSError as e: # ELOOP: recursive symlink, also don't show failure due to permissions if e.errno not in [errno.ELOOP, errno.EACCES]: @@ -298,7 +308,7 @@ def _dir_model(self, path, content=True): exc_info=True, ) - model['format'] = 'json' + model["format"] = "json" return model @@ -313,19 +323,16 @@ def _file_model(self, path, content=True, format=None): If not specified, try to decode as UTF-8, and fall back to base64 """ model = self._base_model(path) - model['type'] = 'file' + model["type"] = "file" os_path = self._get_os_path(path) - 
model['mimetype'] = mimetypes.guess_type(os_path)[0] + model["mimetype"] = mimetypes.guess_type(os_path)[0] if content: content, format = self._read_file(os_path, format) - if model['mimetype'] is None: - default_mime = { - 'text': 'text/plain', - 'base64': 'application/octet-stream' - }[format] - model['mimetype'] = default_mime + if model["mimetype"] is None: + default_mime = {"text": "text/plain", "base64": "application/octet-stream"}[format] + model["mimetype"] = default_mime model.update( content=content, @@ -341,20 +348,20 @@ def _notebook_model(self, path, content=True): as a JSON structure (not double-serialized) """ model = self._base_model(path) - model['type'] = 'notebook' + model["type"] = "notebook" os_path = self._get_os_path(path) if content: nb = self._read_notebook(os_path, as_version=4) self.mark_trusted_cells(nb, path) - model['content'] = nb - model['format'] = 'json' + model["content"] = nb + model["format"] = "json" self.validate_notebook_model(model) return model def get(self, path, content=True, type=None, format=None): - """ Takes a path for an entity and returns its model + """Takes a path for an entity and returns its model Parameters ---------- @@ -375,46 +382,46 @@ def get(self, path, content=True, type=None, format=None): the contents model. If content=True, returns the contents of the file or directory as well. """ - path = path.strip('/') + path = path.strip("/") if not self.exists(path): - raise web.HTTPError(404, u'No such file or directory: %s' % path) + raise web.HTTPError(404, u"No such file or directory: %s" % path) os_path = self._get_os_path(path) if os.path.isdir(os_path): - if type not in (None, 'directory'): - raise web.HTTPError(400, - u'%s is a directory, not a %s' % (path, type), reason='bad type') + if type not in (None, "directory"): + raise web.HTTPError( + 400, u"%s is a directory, not a %s" % (path, type), reason="bad type" + ) model = self._dir_model(path, content=content) - elif type == 'notebook' or (type is None and path.endswith('.ipynb')): + elif type == "notebook" or (type is None and path.endswith(".ipynb")): model = self._notebook_model(path, content=content) else: - if type == 'directory': - raise web.HTTPError(400, - u'%s is not a directory' % path, reason='bad type') + if type == "directory": + raise web.HTTPError(400, u"%s is not a directory" % path, reason="bad type") model = self._file_model(path, content=content, format=format) return model - def _save_directory(self, os_path, model, path=''): + def _save_directory(self, os_path, model, path=""): """create a directory""" if is_hidden(os_path, self.root_dir) and not self.allow_hidden: - raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path) + raise web.HTTPError(400, u"Cannot create hidden directory %r" % os_path) if not os.path.exists(os_path): with self.perm_to_403(): os.mkdir(os_path) elif not os.path.isdir(os_path): - raise web.HTTPError(400, u'Not a directory: %s' % (os_path)) + raise web.HTTPError(400, u"Not a directory: %s" % (os_path)) else: self.log.debug("Directory %r already exists", os_path) - def save(self, model, path=''): + def save(self, model, path=""): """Save the file model and return the model with no content.""" - path = path.strip('/') + path = path.strip("/") - if 'type' not in model: - raise web.HTTPError(400, u'No file type provided') - if 'content' not in model and model['type'] != 'directory': - raise web.HTTPError(400, u'No file content provided') + if "type" not in model: + raise web.HTTPError(400, u"No file type provided") + if 
"content" not in model and model["type"] != "directory": + raise web.HTTPError(400, u"No file content provided") os_path = self._get_os_path(path) self.log.debug("Saving %s", os_path) @@ -422,35 +429,36 @@ def save(self, model, path=''): self.run_pre_save_hook(model=model, path=path) try: - if model['type'] == 'notebook': - nb = nbformat.from_dict(model['content']) + if model["type"] == "notebook": + nb = nbformat.from_dict(model["content"]) self.check_and_sign(nb, path) self._save_notebook(os_path, nb) # One checkpoint should always exist for notebooks. if not self.checkpoints.list_checkpoints(path): self.create_checkpoint(path) - elif model['type'] == 'file': + elif model["type"] == "file": # Missing format will be handled internally by _save_file. - self._save_file(os_path, model['content'], model.get('format')) - elif model['type'] == 'directory': + self._save_file(os_path, model["content"], model.get("format")) + elif model["type"] == "directory": self._save_directory(os_path, model, path) else: - raise web.HTTPError(400, "Unhandled contents type: %s" % model['type']) + raise web.HTTPError(400, "Unhandled contents type: %s" % model["type"]) except web.HTTPError: raise except Exception as e: - self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' - % (path, e)) from e + self.log.error(u"Error while saving file: %s %s", path, e, exc_info=True) + raise web.HTTPError( + 500, u"Unexpected error while saving file: %s %s" % (path, e) + ) from e validation_message = None - if model['type'] == 'notebook': + if model["type"] == "notebook": self.validate_notebook_model(model) - validation_message = model.get('message', None) + validation_message = model.get("message", None) model = self.get(path, content=False) if validation_message: - model['message'] = validation_message + model["message"] = validation_message self.run_post_save_hook(model=model, os_path=os_path) @@ -458,38 +466,38 @@ def save(self, model, path=''): def delete_file(self, path): """Delete file at path.""" - path = path.strip('/') + path = path.strip("/") os_path = self._get_os_path(path) rm = os.unlink if not os.path.exists(os_path): - raise web.HTTPError(404, u'File or directory does not exist: %s' % os_path) + raise web.HTTPError(404, u"File or directory does not exist: %s" % os_path) def _check_trash(os_path): - if sys.platform in {'win32', 'darwin'}: + if sys.platform in {"win32", "darwin"}: return True # It's a bit more nuanced than this, but until we can better # distinguish errors from send2trash, assume that we can only trash # files on the same partition as the home directory. file_dev = os.stat(os_path).st_dev - home_dev = os.stat(os.path.expanduser('~')).st_dev + home_dev = os.stat(os.path.expanduser("~")).st_dev return file_dev == home_dev def is_non_empty_dir(os_path): if os.path.isdir(os_path): # A directory containing only leftover checkpoints is # considered empty. - cp_dir = getattr(self.checkpoints, 'checkpoint_dir', None) + cp_dir = getattr(self.checkpoints, "checkpoint_dir", None) if set(os.listdir(os_path)) - {cp_dir}: return True return False if self.delete_to_trash: - if sys.platform == 'win32' and is_non_empty_dir(os_path): + if sys.platform == "win32" and is_non_empty_dir(os_path): # send2trash can really delete files on Windows, so disallow # deleting non-empty files. See Github issue 3631. 
- raise web.HTTPError(400, u'Directory %s not empty' % os_path) + raise web.HTTPError(400, u"Directory %s not empty" % os_path) if _check_trash(os_path): self.log.debug("Sending %s to trash", os_path) # Looking at the code in send2trash, I don't think the errors it @@ -498,13 +506,14 @@ def is_non_empty_dir(os_path): send2trash(os_path) return else: - self.log.warning("Skipping trash for %s, on different device " - "to home directory", os_path) + self.log.warning( + "Skipping trash for %s, on different device " "to home directory", os_path + ) if os.path.isdir(os_path): # Don't permanently delete non-empty directories. if is_non_empty_dir(os_path): - raise web.HTTPError(400, u'Directory %s not empty' % os_path) + raise web.HTTPError(400, u"Directory %s not empty" % os_path) self.log.debug("Removing directory %s", os_path) with self.perm_to_403(): shutil.rmtree(os_path) @@ -515,8 +524,8 @@ def is_non_empty_dir(os_path): def rename_file(self, old_path, new_path): """Rename a file.""" - old_path = old_path.strip('/') - new_path = new_path.strip('/') + old_path = old_path.strip("/") + new_path = new_path.strip("/") if new_path == old_path: return @@ -525,7 +534,7 @@ def rename_file(self, old_path, new_path): # Should we proceed with the move? if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): - raise web.HTTPError(409, u'File already exists: %s' % new_path) + raise web.HTTPError(409, u"File already exists: %s" % new_path) # Move the file try: @@ -534,8 +543,7 @@ def rename_file(self, old_path, new_path): except web.HTTPError: raise except Exception as e: - raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % - (old_path, e)) from e + raise web.HTTPError(500, u"Unknown error renaming file: %s %s" % (old_path, e)) from e def info_string(self): return _i18n("Serving notebooks from local directory: %s") % self.root_dir @@ -544,14 +552,15 @@ def get_kernel_path(self, path, model=None): """Return the initial API path of a kernel associated with a given notebook""" if self.dir_exists(path): return path - if '/' in path: - parent_dir = path.rsplit('/', 1)[0] + if "/" in path: + parent_dir = path.rsplit("/", 1)[0] else: - parent_dir = '' + parent_dir = "" return parent_dir + class AsyncFileContentsManager(FileContentsManager, AsyncFileManagerMixin, AsyncContentsManager): - @default('checkpoints_class') + @default("checkpoints_class") def _checkpoints_class_default(self): return AsyncFileCheckpoints @@ -562,29 +571,26 @@ async def _dir_model(self, path, content=True): """ os_path = self._get_os_path(path) - four_o_four = u'directory does not exist: %r' % path + four_o_four = u"directory does not exist: %r" % path if not os.path.isdir(os_path): raise web.HTTPError(404, four_o_four) elif is_hidden(os_path, self.root_dir) and not self.allow_hidden: - self.log.info("Refusing to serve hidden directory %r, via 404 Error", - os_path - ) + self.log.info("Refusing to serve hidden directory %r, via 404 Error", os_path) raise web.HTTPError(404, four_o_four) model = self._base_model(path) - model['type'] = 'directory' - model['size'] = None + model["type"] = "directory" + model["size"] = None if content: - model['content'] = contents = [] + model["content"] = contents = [] os_dir = self._get_os_path(path) dir_contents = await run_sync(os.listdir, os_dir) for name in dir_contents: try: os_path = os.path.join(os_dir, name) except UnicodeDecodeError as e: - self.log.warning( - "failed to decode filename '%s': %s", name, e) + self.log.warning("failed to decode filename '%s': %s", 
name, e) continue try: @@ -597,9 +603,11 @@ async def _dir_model(self, path, content=True): self.log.warning("Error stat-ing %s: %s", os_path, e) continue - if (not stat.S_ISLNK(st.st_mode) - and not stat.S_ISREG(st.st_mode) - and not stat.S_ISDIR(st.st_mode)): + if ( + not stat.S_ISLNK(st.st_mode) + and not stat.S_ISREG(st.st_mode) + and not stat.S_ISDIR(st.st_mode) + ): self.log.debug("%s not a regular file", os_path) continue @@ -607,8 +615,8 @@ async def _dir_model(self, path, content=True): if self.should_list(name): if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): contents.append( - await self.get(path='%s/%s' % (path, name), content=False) - ) + await self.get(path="%s/%s" % (path, name), content=False) + ) except OSError as e: # ELOOP: recursive symlink, also don't show failure due to permissions if e.errno not in [errno.ELOOP, errno.EACCES]: @@ -618,7 +626,7 @@ async def _dir_model(self, path, content=True): exc_info=True, ) - model['format'] = 'json' + model["format"] = "json" return model @@ -633,19 +641,16 @@ async def _file_model(self, path, content=True, format=None): If not specified, try to decode as UTF-8, and fall back to base64 """ model = self._base_model(path) - model['type'] = 'file' + model["type"] = "file" os_path = self._get_os_path(path) - model['mimetype'] = mimetypes.guess_type(os_path)[0] + model["mimetype"] = mimetypes.guess_type(os_path)[0] if content: content, format = await self._read_file(os_path, format) - if model['mimetype'] is None: - default_mime = { - 'text': 'text/plain', - 'base64': 'application/octet-stream' - }[format] - model['mimetype'] = default_mime + if model["mimetype"] is None: + default_mime = {"text": "text/plain", "base64": "application/octet-stream"}[format] + model["mimetype"] = default_mime model.update( content=content, @@ -661,20 +666,20 @@ async def _notebook_model(self, path, content=True): as a JSON structure (not double-serialized) """ model = self._base_model(path) - model['type'] = 'notebook' + model["type"] = "notebook" os_path = self._get_os_path(path) if content: nb = await self._read_notebook(os_path, as_version=4) self.mark_trusted_cells(nb, path) - model['content'] = nb - model['format'] = 'json' + model["content"] = nb + model["format"] = "json" self.validate_notebook_model(model) return model async def get(self, path, content=True, type=None, format=None): - """ Takes a path for an entity and returns its model + """Takes a path for an entity and returns its model Parameters ---------- @@ -695,46 +700,46 @@ async def get(self, path, content=True, type=None, format=None): the contents model. If content=True, returns the contents of the file or directory as well. 
""" - path = path.strip('/') + path = path.strip("/") if not self.exists(path): - raise web.HTTPError(404, u'No such file or directory: %s' % path) + raise web.HTTPError(404, u"No such file or directory: %s" % path) os_path = self._get_os_path(path) if os.path.isdir(os_path): - if type not in (None, 'directory'): - raise web.HTTPError(400, - u'%s is a directory, not a %s' % (path, type), reason='bad type') + if type not in (None, "directory"): + raise web.HTTPError( + 400, u"%s is a directory, not a %s" % (path, type), reason="bad type" + ) model = await self._dir_model(path, content=content) - elif type == 'notebook' or (type is None and path.endswith('.ipynb')): + elif type == "notebook" or (type is None and path.endswith(".ipynb")): model = await self._notebook_model(path, content=content) else: - if type == 'directory': - raise web.HTTPError(400, - u'%s is not a directory' % path, reason='bad type') + if type == "directory": + raise web.HTTPError(400, u"%s is not a directory" % path, reason="bad type") model = await self._file_model(path, content=content, format=format) return model - async def _save_directory(self, os_path, model, path=''): + async def _save_directory(self, os_path, model, path=""): """create a directory""" if is_hidden(os_path, self.root_dir) and not self.allow_hidden: - raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path) + raise web.HTTPError(400, u"Cannot create hidden directory %r" % os_path) if not os.path.exists(os_path): with self.perm_to_403(): await run_sync(os.mkdir, os_path) elif not os.path.isdir(os_path): - raise web.HTTPError(400, u'Not a directory: %s' % (os_path)) + raise web.HTTPError(400, u"Not a directory: %s" % (os_path)) else: self.log.debug("Directory %r already exists", os_path) - async def save(self, model, path=''): + async def save(self, model, path=""): """Save the file model and return the model with no content.""" - path = path.strip('/') + path = path.strip("/") - if 'type' not in model: - raise web.HTTPError(400, u'No file type provided') - if 'content' not in model and model['type'] != 'directory': - raise web.HTTPError(400, u'No file content provided') + if "type" not in model: + raise web.HTTPError(400, u"No file type provided") + if "content" not in model and model["type"] != "directory": + raise web.HTTPError(400, u"No file content provided") os_path = self._get_os_path(path) self.log.debug("Saving %s", os_path) @@ -742,35 +747,36 @@ async def save(self, model, path=''): self.run_pre_save_hook(model=model, path=path) try: - if model['type'] == 'notebook': - nb = nbformat.from_dict(model['content']) + if model["type"] == "notebook": + nb = nbformat.from_dict(model["content"]) self.check_and_sign(nb, path) await self._save_notebook(os_path, nb) # One checkpoint should always exist for notebooks. if not (await self.checkpoints.list_checkpoints(path)): await self.create_checkpoint(path) - elif model['type'] == 'file': + elif model["type"] == "file": # Missing format will be handled internally by _save_file. 
- await self._save_file(os_path, model['content'], model.get('format')) - elif model['type'] == 'directory': + await self._save_file(os_path, model["content"], model.get("format")) + elif model["type"] == "directory": await self._save_directory(os_path, model, path) else: - raise web.HTTPError(400, "Unhandled contents type: %s" % model['type']) + raise web.HTTPError(400, "Unhandled contents type: %s" % model["type"]) except web.HTTPError: raise except Exception as e: - self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' - % (path, e)) from e + self.log.error(u"Error while saving file: %s %s", path, e, exc_info=True) + raise web.HTTPError( + 500, u"Unexpected error while saving file: %s %s" % (path, e) + ) from e validation_message = None - if model['type'] == 'notebook': + if model["type"] == "notebook": self.validate_notebook_model(model) - validation_message = model.get('message', None) + validation_message = model.get("message", None) model = await self.get(path, content=False) if validation_message: - model['message'] = validation_message + model["message"] = validation_message self.run_post_save_hook(model=model, os_path=os_path) @@ -778,28 +784,28 @@ async def save(self, model, path=''): async def delete_file(self, path): """Delete file at path.""" - path = path.strip('/') + path = path.strip("/") os_path = self._get_os_path(path) rm = os.unlink if not os.path.exists(os_path): - raise web.HTTPError(404, u'File or directory does not exist: %s' % os_path) + raise web.HTTPError(404, u"File or directory does not exist: %s" % os_path) async def _check_trash(os_path): - if sys.platform in {'win32', 'darwin'}: + if sys.platform in {"win32", "darwin"}: return True # It's a bit more nuanced than this, but until we can better # distinguish errors from send2trash, assume that we can only trash # files on the same partition as the home directory. file_dev = (await run_sync(os.stat, os_path)).st_dev - home_dev = (await run_sync(os.stat, os.path.expanduser('~'))).st_dev + home_dev = (await run_sync(os.stat, os.path.expanduser("~"))).st_dev return file_dev == home_dev async def is_non_empty_dir(os_path): if os.path.isdir(os_path): # A directory containing only leftover checkpoints is # considered empty. - cp_dir = getattr(self.checkpoints, 'checkpoint_dir', None) + cp_dir = getattr(self.checkpoints, "checkpoint_dir", None) dir_contents = set(await run_sync(os.listdir, os_path)) if dir_contents - {cp_dir}: return True @@ -807,10 +813,10 @@ async def is_non_empty_dir(os_path): return False if self.delete_to_trash: - if sys.platform == 'win32' and await is_non_empty_dir(os_path): + if sys.platform == "win32" and await is_non_empty_dir(os_path): # send2trash can really delete files on Windows, so disallow # deleting non-empty files. See Github issue 3631. - raise web.HTTPError(400, u'Directory %s not empty' % os_path) + raise web.HTTPError(400, u"Directory %s not empty" % os_path) if await _check_trash(os_path): self.log.debug("Sending %s to trash", os_path) # Looking at the code in send2trash, I don't think the errors it @@ -819,13 +825,14 @@ async def is_non_empty_dir(os_path): send2trash(os_path) return else: - self.log.warning("Skipping trash for %s, on different device " - "to home directory", os_path) + self.log.warning( + "Skipping trash for %s, on different device " "to home directory", os_path + ) if os.path.isdir(os_path): # Don't permanently delete non-empty directories. 
if await is_non_empty_dir(os_path): - raise web.HTTPError(400, u'Directory %s not empty' % os_path) + raise web.HTTPError(400, u"Directory %s not empty" % os_path) self.log.debug("Removing directory %s", os_path) with self.perm_to_403(): await run_sync(shutil.rmtree, os_path) @@ -836,8 +843,8 @@ async def is_non_empty_dir(os_path): async def rename_file(self, old_path, new_path): """Rename a file.""" - old_path = old_path.strip('/') - new_path = new_path.strip('/') + old_path = old_path.strip("/") + new_path = new_path.strip("/") if new_path == old_path: return @@ -846,7 +853,7 @@ async def rename_file(self, old_path, new_path): # Should we proceed with the move? if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): - raise web.HTTPError(409, u'File already exists: %s' % new_path) + raise web.HTTPError(409, u"File already exists: %s" % new_path) # Move the file try: @@ -855,5 +862,4 @@ async def rename_file(self, old_path, new_path): except web.HTTPError: raise except Exception as e: - raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % - (old_path, e)) from e + raise web.HTTPError(500, u"Unknown error renaming file: %s %s" % (old_path, e)) from e diff --git a/jupyter_server/services/contents/handlers.py b/jupyter_server/services/contents/handlers.py index b7a8b1af1b..b79ba1eb8f 100644 --- a/jupyter_server/services/contents/handlers.py +++ b/jupyter_server/services/contents/handlers.py @@ -2,20 +2,19 @@ Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-27%3A-Contents-Service """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - import json -from tornado import web - -from jupyter_server.utils import url_path_join, url_escape, ensure_async from jupyter_client.jsonutil import date_default +from tornado import web -from jupyter_server.base.handlers import ( - JupyterHandler, APIHandler, path_regex, -) +from jupyter_server.base.handlers import APIHandler +from jupyter_server.base.handlers import JupyterHandler +from jupyter_server.base.handlers import path_regex +from jupyter_server.utils import ensure_async +from jupyter_server.utils import url_escape +from jupyter_server.utils import url_path_join def validate_model(model, expect_content): @@ -43,7 +42,7 @@ def validate_model(model, expect_content): u"Missing Model Keys: {missing}".format(missing=missing), ) - maybe_none_keys = ['content', 'format'] + maybe_none_keys = ["content", "format"] if expect_content: errors = [key for key in maybe_none_keys if model[key] is None] if errors: @@ -52,11 +51,7 @@ def validate_model(model, expect_content): u"Keys unexpectedly None: {keys}".format(keys=errors), ) else: - errors = { - key: model[key] - for key in maybe_none_keys - if model[key] is not None - } + errors = {key: model[key] for key in maybe_none_keys if model[key] is not None} if errors: raise web.HTTPError( 500, @@ -65,7 +60,6 @@ def validate_model(model, expect_content): class ContentsHandler(APIHandler): - def location_url(self, path): """Return the full URL location of a file. @@ -74,62 +68,67 @@ def location_url(self, path): path : unicode The API path of the file, such as "foo/bar.txt". 
""" - return url_path_join( - self.base_url, 'api', 'contents', url_escape(path) - ) + return url_path_join(self.base_url, "api", "contents", url_escape(path)) def _finish_model(self, model, location=True): """Finish a JSON request with a model, setting relevant headers, etc.""" if location: - location = self.location_url(model['path']) - self.set_header('Location', location) - self.set_header('Last-Modified', model['last_modified']) - self.set_header('Content-Type', 'application/json') + location = self.location_url(model["path"]) + self.set_header("Location", location) + self.set_header("Last-Modified", model["last_modified"]) + self.set_header("Content-Type", "application/json") self.finish(json.dumps(model, default=date_default)) @web.authenticated - async def get(self, path=''): + async def get(self, path=""): """Return a model for a file or directory. A directory model contains a list of models (without content) of the files and directories it contains. """ - path = path or '' - type = self.get_query_argument('type', default=None) - if type not in {None, 'directory', 'file', 'notebook'}: - raise web.HTTPError(400, u'Type %r is invalid' % type) - - format = self.get_query_argument('format', default=None) - if format not in {None, 'text', 'base64'}: - raise web.HTTPError(400, u'Format %r is invalid' % format) - content = self.get_query_argument('content', default='1') - if content not in {'0', '1'}: - raise web.HTTPError(400, u'Content %r is invalid' % content) + path = path or "" + type = self.get_query_argument("type", default=None) + if type not in {None, "directory", "file", "notebook"}: + raise web.HTTPError(400, u"Type %r is invalid" % type) + + format = self.get_query_argument("format", default=None) + if format not in {None, "text", "base64"}: + raise web.HTTPError(400, u"Format %r is invalid" % format) + content = self.get_query_argument("content", default="1") + if content not in {"0", "1"}: + raise web.HTTPError(400, u"Content %r is invalid" % content) content = int(content) - model = await ensure_async(self.contents_manager.get( - path=path, type=type, format=format, content=content, - )) + model = await ensure_async( + self.contents_manager.get( + path=path, + type=type, + format=format, + content=content, + ) + ) validate_model(model, expect_content=content) self._finish_model(model, location=False) @web.authenticated - async def patch(self, path=''): + async def patch(self, path=""): """PATCH renames a file or directory without re-uploading content.""" cm = self.contents_manager model = self.get_json_body() if model is None: - raise web.HTTPError(400, u'JSON body missing') + raise web.HTTPError(400, u"JSON body missing") model = await ensure_async(cm.update(model, path)) validate_model(model, expect_content=False) self._finish_model(model) async def _copy(self, copy_from, copy_to=None): """Copy a file, optionally specifying a target directory.""" - self.log.info(u"Copying {copy_from} to {copy_to}".format( - copy_from=copy_from, - copy_to=copy_to or '', - )) + self.log.info( + u"Copying {copy_from} to {copy_to}".format( + copy_from=copy_from, + copy_to=copy_to or "", + ) + ) model = await ensure_async(self.contents_manager.copy(copy_from, copy_to)) self.set_status(201) validate_model(model, expect_content=False) @@ -143,11 +142,12 @@ async def _upload(self, model, path): validate_model(model, expect_content=False) self._finish_model(model) - async def _new_untitled(self, path, type='', ext=''): + async def _new_untitled(self, path, type="", ext=""): """Create a new, empty 
untitled entity""" - self.log.info(u"Creating new %s in %s", type or 'file', path) - model = await ensure_async(self.contents_manager.new_untitled( - path=path, type=type, ext=ext)) + self.log.info(u"Creating new %s in %s", type or "file", path) + model = await ensure_async( + self.contents_manager.new_untitled(path=path, type=type, ext=ext) + ) self.set_status(201) validate_model(model, expect_content=False) self._finish_model(model) @@ -162,7 +162,7 @@ async def _save(self, model, path): self._finish_model(model) @web.authenticated - async def post(self, path=''): + async def post(self, path=""): """Create a new file in the specified path. POST creates new files. The server always decides on the name. @@ -187,9 +187,9 @@ async def post(self, path=''): model = self.get_json_body() if model is not None: - copy_from = model.get('copy_from') - ext = model.get('ext', '') - type = model.get('type', '') + copy_from = model.get("copy_from") + ext = model.get("ext", "") + type = model.get("type", "") if copy_from: await self._copy(copy_from, path) else: @@ -198,7 +198,7 @@ async def post(self, path=''): await self._new_untitled(path) @web.authenticated - async def put(self, path=''): + async def put(self, path=""): """Saves the file in the location specified by name and path. PUT is very similar to POST, but the requester specifies the name, @@ -211,7 +211,7 @@ async def put(self, path=''): """ model = self.get_json_body() if model: - if model.get('copy_from'): + if model.get("copy_from"): raise web.HTTPError(400, "Cannot copy with PUT, only POST") exists = await ensure_async(self.contents_manager.file_exists(path)) if exists: @@ -222,19 +222,18 @@ async def put(self, path=''): await self._new_untitled(path) @web.authenticated - async def delete(self, path=''): + async def delete(self, path=""): """delete a file in the given path""" cm = self.contents_manager - self.log.warning('delete %s', path) + self.log.warning("delete %s", path) await ensure_async(cm.delete(path)) self.set_status(204) self.finish() class CheckpointsHandler(APIHandler): - @web.authenticated - async def get(self, path=''): + async def get(self, path=""): """get lists checkpoints for a file""" cm = self.contents_manager checkpoints = await ensure_async(cm.list_checkpoints(path)) @@ -242,20 +241,24 @@ async def get(self, path=''): self.finish(data) @web.authenticated - async def post(self, path=''): + async def post(self, path=""): """post creates a new checkpoint""" cm = self.contents_manager checkpoint = await ensure_async(cm.create_checkpoint(path)) data = json.dumps(checkpoint, default=date_default) - location = url_path_join(self.base_url, 'api/contents', - url_escape(path), 'checkpoints', url_escape(checkpoint['id'])) - self.set_header('Location', location) + location = url_path_join( + self.base_url, + "api/contents", + url_escape(path), + "checkpoints", + url_escape(checkpoint["id"]), + ) + self.set_header("Location", location) self.set_status(201) self.finish(data) class ModifyCheckpointsHandler(APIHandler): - @web.authenticated async def post(self, path, checkpoint_id): """post restores a file from a checkpoint""" @@ -275,15 +278,12 @@ async def delete(self, path, checkpoint_id): class NotebooksRedirectHandler(JupyterHandler): """Redirect /api/notebooks to /api/contents""" - SUPPORTED_METHODS = ('GET', 'PUT', 'PATCH', 'POST', 'DELETE') + + SUPPORTED_METHODS = ("GET", "PUT", "PATCH", "POST", "DELETE") def get(self, path): self.log.warning("/api/notebooks is deprecated, use /api/contents") - self.redirect(url_path_join( 
-            self.base_url,
-            'api/contents',
-            url_escape(path)
-        ))
+        self.redirect(url_path_join(self.base_url, "api/contents", url_escape(path)))
 
     put = patch = post = delete = get
 
@@ -292,22 +292,26 @@ class TrustNotebooksHandler(JupyterHandler):
     """ Handles trust/signing of notebooks """
 
     @web.authenticated
-    async def post(self,path=''):
+    async def post(self, path=""):
         cm = self.contents_manager
         await ensure_async(cm.trust_notebook(path))
         self.set_status(201)
         self.finish()
 
-#-----------------------------------------------------------------------------
+
+
+# -----------------------------------------------------------------------------
 # URL to handler mappings
-#-----------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
 
 
 _checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)"
 
 default_handlers = [
     (r"/api/contents%s/checkpoints" % path_regex, CheckpointsHandler),
-    (r"/api/contents%s/checkpoints/%s" % (path_regex, _checkpoint_id_regex),
-        ModifyCheckpointsHandler),
+    (
+        r"/api/contents%s/checkpoints/%s" % (path_regex, _checkpoint_id_regex),
+        ModifyCheckpointsHandler,
+    ),
     (r"/api/contents%s/trust" % path_regex, TrustNotebooksHandler),
     (r"/api/contents%s" % path_regex, ContentsHandler),
     (r"/api/notebooks/?(.*)", NotebooksRedirectHandler),
diff --git a/jupyter_server/services/contents/largefilemanager.py b/jupyter_server/services/contents/largefilemanager.py
index 936fc5cfbb..5c2b6b28b6 100644
--- a/jupyter_server/services/contents/largefilemanager.py
+++ b/jupyter_server/services/contents/largefilemanager.py
@@ -1,26 +1,34 @@
+import base64
+import io
+import os
+
 from anyio.to_thread import run_sync
 from tornado import web
-import base64
-import os, io
 
-from jupyter_server.services.contents.filemanager import AsyncFileContentsManager, FileContentsManager
+from jupyter_server.services.contents.filemanager import AsyncFileContentsManager
+from jupyter_server.services.contents.filemanager import FileContentsManager
 
 
 class LargeFileManager(FileContentsManager):
     """Handle large file upload."""
 
-    def save(self, model, path=''):
+    def save(self, model, path=""):
         """Save the file model and return the model with no content."""
-        chunk = model.get('chunk', None)
+        chunk = model.get("chunk", None)
         if chunk is not None:
-            path = path.strip('/')
-
-            if 'type' not in model:
-                raise web.HTTPError(400, u'No file type provided')
-            if model['type'] != 'file':
-                raise web.HTTPError(400, u'File type "{}" is not supported for large file transfer'.format(model['type']))
-            if 'content' not in model and model['type'] != 'directory':
-                raise web.HTTPError(400, u'No file content provided')
+            path = path.strip("/")
+
+            if "type" not in model:
+                raise web.HTTPError(400, u"No file type provided")
+            if model["type"] != "file":
+                raise web.HTTPError(
+                    400,
+                    u'File type "{}" is not supported for large file transfer'.format(
+                        model["type"]
+                    ),
+                )
+            if "content" not in model and model["type"] != "directory":
+                raise web.HTTPError(400, u"No file content provided")
 
             os_path = self._get_os_path(path)
 
@@ -28,15 +36,18 @@ def save(self, model, path=''):
             if chunk == 1:
                 self.log.debug("Saving %s", os_path)
                 self.run_pre_save_hook(model=model, path=path)
-                super(LargeFileManager, self)._save_file(os_path, model['content'], model.get('format'))
+                super(LargeFileManager, self)._save_file(
+                    os_path, model["content"], model.get("format")
+                )
             else:
-                self._save_large_file(os_path, model['content'], model.get('format'))
+                self._save_large_file(os_path, model["content"], model.get("format"))
model["content"], model.get("format")) except web.HTTPError: raise except Exception as e: - self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % - (path, e)) from e + self.log.error(u"Error while saving file: %s %s", path, e, exc_info=True) + raise web.HTTPError( + 500, u"Unexpected error while saving file: %s %s" % (path, e) + ) from e model = self.get(path, content=False) @@ -49,44 +60,47 @@ def save(self, model, path=''): def _save_large_file(self, os_path, content, format): """Save content of a generic file.""" - if format not in {'text', 'base64'}: + if format not in {"text", "base64"}: raise web.HTTPError( 400, "Must specify format of file contents as 'text' or 'base64'", ) try: - if format == 'text': - bcontent = content.encode('utf8') + if format == "text": + bcontent = content.encode("utf8") else: - b64_bytes = content.encode('ascii') + b64_bytes = content.encode("ascii") bcontent = base64.b64decode(b64_bytes) except Exception as e: - raise web.HTTPError( - 400, u'Encoding error saving %s: %s' % (os_path, e) - ) from e + raise web.HTTPError(400, u"Encoding error saving %s: %s" % (os_path, e)) from e with self.perm_to_403(os_path): if os.path.islink(os_path): os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path)) - with io.open(os_path, 'ab') as f: + with io.open(os_path, "ab") as f: f.write(bcontent) class AsyncLargeFileManager(AsyncFileContentsManager): """Handle large file upload asynchronously""" - async def save(self, model, path=''): + async def save(self, model, path=""): """Save the file model and return the model with no content.""" - chunk = model.get('chunk', None) + chunk = model.get("chunk", None) if chunk is not None: - path = path.strip('/') - - if 'type' not in model: - raise web.HTTPError(400, u'No file type provided') - if model['type'] != 'file': - raise web.HTTPError(400, u'File type "{}" is not supported for large file transfer'.format(model['type'])) - if 'content' not in model and model['type'] != 'directory': - raise web.HTTPError(400, u'No file content provided') + path = path.strip("/") + + if "type" not in model: + raise web.HTTPError(400, u"No file type provided") + if model["type"] != "file": + raise web.HTTPError( + 400, + u'File type "{}" is not supported for large file transfer'.format( + model["type"] + ), + ) + if "content" not in model and model["type"] != "directory": + raise web.HTTPError(400, u"No file content provided") os_path = self._get_os_path(path) @@ -94,15 +108,18 @@ async def save(self, model, path=''): if chunk == 1: self.log.debug("Saving %s", os_path) self.run_pre_save_hook(model=model, path=path) - await super(AsyncLargeFileManager, self)._save_file(os_path, model['content'], model.get('format')) + await super(AsyncLargeFileManager, self)._save_file( + os_path, model["content"], model.get("format") + ) else: - await self._save_large_file(os_path, model['content'], model.get('format')) + await self._save_large_file(os_path, model["content"], model.get("format")) except web.HTTPError: raise except Exception as e: - self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % - (path, e)) from e + self.log.error(u"Error while saving file: %s %s", path, e, exc_info=True) + raise web.HTTPError( + 500, u"Unexpected error while saving file: %s %s" % (path, e) + ) from e model = await self.get(path, content=False) @@ -115,26 +132,22 @@ async def 
save(self, model, path=''): async def _save_large_file(self, os_path, content, format): """Save content of a generic file.""" - if format not in {'text', 'base64'}: + if format not in {"text", "base64"}: raise web.HTTPError( 400, "Must specify format of file contents as 'text' or 'base64'", ) try: - if format == 'text': - bcontent = content.encode('utf8') + if format == "text": + bcontent = content.encode("utf8") else: - b64_bytes = content.encode('ascii') + b64_bytes = content.encode("ascii") bcontent = base64.b64decode(b64_bytes) except Exception as e: - raise web.HTTPError( - 400, u'Encoding error saving %s: %s' % (os_path, e) - ) from e + raise web.HTTPError(400, u"Encoding error saving %s: %s" % (os_path, e)) from e with self.perm_to_403(os_path): if os.path.islink(os_path): os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path)) - with io.open(os_path, 'ab') as f: + with io.open(os_path, "ab") as f: await run_sync(f.write, bcontent) - - diff --git a/jupyter_server/services/contents/manager.py b/jupyter_server/services/contents/manager.py index 87a13330e3..f522b4bd26 100644 --- a/jupyter_server/services/contents/manager.py +++ b/jupyter_server/services/contents/manager.py @@ -1,38 +1,38 @@ """A base class for contents managers.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - -from fnmatch import fnmatch import itertools import json import re +from fnmatch import fnmatch -from tornado.web import HTTPError, RequestHandler +from ipython_genutils.importstring import import_item +from nbformat import sign +from nbformat import validate as validate_nb +from nbformat import ValidationError +from nbformat.v4 import new_notebook +from tornado.web import HTTPError +from tornado.web import RequestHandler +from traitlets import Any +from traitlets import Bool +from traitlets import default +from traitlets import Dict +from traitlets import Instance +from traitlets import List +from traitlets import TraitError +from traitlets import Type +from traitlets import Unicode +from traitlets import validate +from traitlets.config.configurable import LoggingConfigurable from ...files.handlers import FilesHandler -from .checkpoints import Checkpoints, AsyncCheckpoints -from traitlets.config.configurable import LoggingConfigurable -from nbformat import sign, validate as validate_nb, ValidationError -from nbformat.v4 import new_notebook -from ipython_genutils.importstring import import_item -from traitlets import ( - Any, - Bool, - Dict, - Instance, - List, - TraitError, - Type, - Unicode, - validate, - default, -) +from .checkpoints import AsyncCheckpoints +from .checkpoints import Checkpoints from jupyter_server.transutils import _i18n from jupyter_server.utils import ensure_async -copy_pat = re.compile(r'\-Copy\d*\.') +copy_pat = re.compile(r"\-Copy\d*\.") class ContentsManager(LoggingConfigurable): @@ -54,34 +54,50 @@ class ContentsManager(LoggingConfigurable): """ - root_dir = Unicode('/', config=True) + root_dir = Unicode("/", config=True) allow_hidden = Bool(False, config=True, help="Allow access to hidden files") notary = Instance(sign.NotebookNotary) + def _notary_default(self): return sign.NotebookNotary(parent=self) - hide_globs = List(Unicode(), [ - u'__pycache__', '*.pyc', '*.pyo', - '.DS_Store', '*.so', '*.dylib', '*~', - ], config=True, help=""" + hide_globs = List( + Unicode(), + [ + u"__pycache__", + "*.pyc", + "*.pyo", + ".DS_Store", + "*.so", + "*.dylib", + "*~", + ], + config=True, + help=""" Glob patterns to hide 
in file and directory listings. - """) + """, + ) - untitled_notebook = Unicode(_i18n("Untitled"), config=True, - help="The base name used when creating untitled notebooks." + untitled_notebook = Unicode( + _i18n("Untitled"), config=True, help="The base name used when creating untitled notebooks." ) - untitled_file = Unicode("untitled", config=True, - help="The base name used when creating untitled files." + untitled_file = Unicode( + "untitled", config=True, help="The base name used when creating untitled files." ) - untitled_directory = Unicode("Untitled Folder", config=True, - help="The base name used when creating untitled directories." + untitled_directory = Unicode( + "Untitled Folder", + config=True, + help="The base name used when creating untitled directories.", ) - pre_save_hook = Any(None, config=True, allow_none=True, + pre_save_hook = Any( + None, + config=True, + allow_none=True, help="""Python callable or importstring thereof To be called on a contents model prior to save. @@ -98,12 +114,12 @@ def _notary_default(self): Modifying this dict will affect the file that is stored. - path: the API path of the save destination - contents_manager: this ContentsManager instance - """ + """, ) - @validate('pre_save_hook') + @validate("pre_save_hook") def _validate_pre_save_hook(self, proposal): - value = proposal['value'] + value = proposal["value"] if isinstance(value, str): value = import_item(self.pre_save_hook) if not callable(value): @@ -129,11 +145,11 @@ def run_pre_save_hook(self, model, path, **kwargs): checkpoints = Instance(Checkpoints, config=True) checkpoints_kwargs = Dict(config=True) - @default('checkpoints') + @default("checkpoints") def _default_checkpoints(self): return self.checkpoints_class(**self.checkpoints_kwargs) - @default('checkpoints_kwargs') + @default("checkpoints_kwargs") def _default_checkpoints_kwargs(self): return dict( parent=self, @@ -141,7 +157,10 @@ def _default_checkpoints_kwargs(self): ) files_handler_class = Type( - FilesHandler, klass=RequestHandler, allow_none=True, config=True, + FilesHandler, + klass=RequestHandler, + allow_none=True, + config=True, help="""handler class to use when serving raw file requests. Default is a fallback that talks to the ContentsManager API, @@ -151,7 +170,7 @@ def _default_checkpoints_kwargs(self): which will be much more efficient. Access to these files should be Authenticated. - """ + """, ) files_handler_params = Dict( @@ -160,7 +179,7 @@ def _default_checkpoints_kwargs(self): For example, StaticFileHandlers generally expect a `path` argument specifying the root directory from which to serve files. - """ + """, ) def get_extra_handlers(self): @@ -170,9 +189,7 @@ def get_extra_handlers(self): """ handlers = [] if self.files_handler_class: - handlers.append( - (r"/files/(.*)", self.files_handler_class, self.files_handler_params) - ) + handlers.append((r"/files/(.*)", self.files_handler_class, self.files_handler_params)) return handlers # ContentsManager API part 1: methods that must be @@ -214,7 +231,7 @@ def is_hidden(self, path): """ raise NotImplementedError - def file_exists(self, path=''): + def file_exists(self, path=""): """Does a file exist at the given path? Like os.path.isfile @@ -231,7 +248,7 @@ def file_exists(self, path=''): exists : bool Whether the file exists. """ - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") def exists(self, path): """Does a file or directory exist at the given path? 
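The hunks above and below are pure reformatting, but they make the split in the file explicit: "part 1" of the ContentsManager API (dir_exists, is_hidden, file_exists, get, save, delete_file, rename_file) raises NotImplementedError("must be implemented in a subclass"), while "part 2" (delete, update, new_untitled, new, copy, the checkpoint wiring) ships working defaults built on part 1. A minimal sketch of what a subclass therefore has to supply; InMemoryContentsManager and its _store dict are hypothetical names, used only for illustration:

from jupyter_server.services.contents.manager import ContentsManager


class InMemoryContentsManager(ContentsManager):
    """Toy manager that keeps file models in a dict instead of on disk."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._store = {}  # maps API paths to model dicts

    def dir_exists(self, path):
        # Only the root "directory" exists in this flat toy store.
        return path.strip("/") == ""

    def is_hidden(self, path):
        return False

    def file_exists(self, path=""):
        return path.strip("/") in self._store

    def get(self, path, content=True, type=None, format=None):
        model = dict(self._store[path.strip("/")])
        if not content:
            model["content"] = None
            model["format"] = None
        return model

    def save(self, model, path):
        # Per the API docs above, run the pre-save hook before writing any data.
        self.run_pre_save_hook(model=model, path=path)
        self._store[path.strip("/")] = model
        # save() returns the model with no content, as the callers above expect.
        return self.get(path, content=False)

    def delete_file(self, path):
        self._store.pop(path.strip("/"), None)

    def rename_file(self, old_path, new_path):
        self._store[new_path.strip("/")] = self._store.pop(old_path.strip("/"))

With those seven methods in place, the part-2 defaults (new_untitled's filename incrementing, copy's "-Copy" suffixing, PATCH renames) work unchanged; such a class could then be wired in through the ServerApp.contents_manager_class setting.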
@@ -252,7 +269,7 @@ def exists(self, path): def get(self, path, content=True, type=None, format=None): """Get a file or directory model.""" - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") def save(self, model, path): """ @@ -262,22 +279,22 @@ def save(self, model, path): should call self.run_pre_save_hook(model=model, path=path) prior to writing any data. """ - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") def delete_file(self, path): """Delete the file or directory at path.""" - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") def rename_file(self, old_path, new_path): """Rename a file or directory.""" - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") # ContentsManager API part 2: methods that have useable default # implementations, but can be overridden in subclasses. def delete(self, path): """Delete a file/directory and any associated checkpoints.""" - path = path.strip('/') + path = path.strip("/") if not path: raise HTTPError(400, "Can't delete root") self.delete_file(path) @@ -294,8 +311,8 @@ def update(self, model, path): For use in PATCH requests, to enable renaming a file without re-uploading its contents. Only used for renaming at the moment. """ - path = path.strip('/') - new_path = model.get('path', path).strip('/') + path = path.strip("/") + new_path = model.get("path", path).strip("/") if path != new_path: self.rename(path, new_path) model = self.get(new_path, content=False) @@ -314,9 +331,9 @@ def get_kernel_path(self, path, model=None): notebook server. FileContentsManager overrides this to use the directory containing the notebook. """ - return '' + return "" - def increment_filename(self, filename, path='', insert=''): + def increment_filename(self, filename, path="", insert=""): """Increment a filename until it is unique. Parameters @@ -334,35 +351,37 @@ def increment_filename(self, filename, path='', insert=''): A filename that is unique, based on the input filename. """ # Extract the full suffix from the filename (e.g. 
.tar.gz) - path = path.strip('/') - basename, dot, ext = filename.rpartition('.') - if ext != 'ipynb': - basename, dot, ext = filename.partition('.') + path = path.strip("/") + basename, dot, ext = filename.rpartition(".") + if ext != "ipynb": + basename, dot, ext = filename.partition(".") suffix = dot + ext for i in itertools.count(): if i: - insert_i = '{}{}'.format(insert, i) + insert_i = "{}{}".format(insert, i) else: - insert_i = '' - name = u'{basename}{insert}{suffix}'.format(basename=basename, - insert=insert_i, suffix=suffix) - if not self.exists(u'{}/{}'.format(path, name)): + insert_i = "" + name = u"{basename}{insert}{suffix}".format( + basename=basename, insert=insert_i, suffix=suffix + ) + if not self.exists(u"{}/{}".format(path, name)): break return name def validate_notebook_model(self, model): """Add failed-validation message to model""" try: - validate_nb(model['content']) + validate_nb(model["content"]) except ValidationError as e: - model['message'] = u'Notebook validation failed: {}:\n{}'.format( - e.message, json.dumps(e.instance, indent=1, default=lambda obj: ''), + model["message"] = u"Notebook validation failed: {}:\n{}".format( + e.message, + json.dumps(e.instance, indent=1, default=lambda obj: ""), ) return model - def new_untitled(self, path='', type='', ext=''): + def new_untitled(self, path="", type="", ext=""): """Create a new untitled file or directory in path path must be a directory @@ -371,58 +390,58 @@ def new_untitled(self, path='', type='', ext=''): Use `new` to create files with a fully specified path (including filename). """ - path = path.strip('/') + path = path.strip("/") if not self.dir_exists(path): - raise HTTPError(404, 'No such directory: %s' % path) + raise HTTPError(404, "No such directory: %s" % path) model = {} if type: - model['type'] = type + model["type"] = type - if ext == '.ipynb': - model.setdefault('type', 'notebook') + if ext == ".ipynb": + model.setdefault("type", "notebook") else: - model.setdefault('type', 'file') + model.setdefault("type", "file") - insert = '' - if model['type'] == 'directory': + insert = "" + if model["type"] == "directory": untitled = self.untitled_directory - insert = ' ' - elif model['type'] == 'notebook': + insert = " " + elif model["type"] == "notebook": untitled = self.untitled_notebook - ext = '.ipynb' - elif model['type'] == 'file': + ext = ".ipynb" + elif model["type"] == "file": untitled = self.untitled_file else: - raise HTTPError(400, "Unexpected model type: %r" % model['type']) + raise HTTPError(400, "Unexpected model type: %r" % model["type"]) name = self.increment_filename(untitled + ext, path, insert=insert) - path = u'{0}/{1}'.format(path, name) + path = u"{0}/{1}".format(path, name) return self.new(model, path) - def new(self, model=None, path=''): + def new(self, model=None, path=""): """Create a new file or directory and return its model with no content. To create a new untitled entity in a directory, use `new_untitled`. 
""" - path = path.strip('/') + path = path.strip("/") if model is None: model = {} - if path.endswith('.ipynb'): - model.setdefault('type', 'notebook') + if path.endswith(".ipynb"): + model.setdefault("type", "notebook") else: - model.setdefault('type', 'file') + model.setdefault("type", "file") # no content, not a directory, so fill out new-file model - if 'content' not in model and model['type'] != 'directory': - if model['type'] == 'notebook': - model['content'] = new_notebook() - model['format'] = 'json' + if "content" not in model and model["type"] != "directory": + if model["type"] == "notebook": + model["content"] = new_notebook() + model["format"] = "json" else: - model['content'] = '' - model['type'] = 'file' - model['format'] = 'text' + model["content"] = "" + model["type"] = "file" + model["format"] = "text" model = self.save(model, path) return model @@ -437,28 +456,28 @@ def copy(self, from_path, to_path=None): from_path must be a full path to a file. """ - path = from_path.strip('/') + path = from_path.strip("/") if to_path is not None: - to_path = to_path.strip('/') + to_path = to_path.strip("/") - if '/' in path: - from_dir, from_name = path.rsplit('/', 1) + if "/" in path: + from_dir, from_name = path.rsplit("/", 1) else: - from_dir = '' + from_dir = "" from_name = path model = self.get(path) - model.pop('path', None) - model.pop('name', None) - if model['type'] == 'directory': + model.pop("path", None) + model.pop("name", None) + if model["type"] == "directory": raise HTTPError(400, "Can't copy directories") if to_path is None: to_path = from_dir if self.dir_exists(to_path): - name = copy_pat.sub(u'.', from_name) - to_name = self.increment_filename(name, to_path, insert='-Copy') - to_path = u'{0}/{1}'.format(to_path, to_name) + name = copy_pat.sub(u".", from_name) + to_name = self.increment_filename(name, to_path, insert="-Copy") + to_path = u"{0}/{1}".format(to_path, to_name) model = self.save(model, to_path) return model @@ -475,12 +494,12 @@ def trust_notebook(self, path): The path of a notebook """ model = self.get(path) - nb = model['content'] + nb = model["content"] self.log.warning("Trusting notebook %s", path) self.notary.mark_cells(nb, True) self.check_and_sign(nb, path) - def check_and_sign(self, nb, path=''): + def check_and_sign(self, nb, path=""): """Check for trusted cells, and sign the notebook. Called as a part of saving notebooks. @@ -497,7 +516,7 @@ def check_and_sign(self, nb, path=''): else: self.log.warning("Notebook %s is not trusted", path) - def mark_trusted_cells(self, nb, path=''): + def mark_trusted_cells(self, nb, path=""): """Mark cells as trusted if the notebook signature matches. Called as a part of loading notebooks. @@ -543,11 +562,11 @@ class AsyncContentsManager(ContentsManager): checkpoints = Instance(AsyncCheckpoints, config=True) checkpoints_kwargs = Dict(config=True) - @default('checkpoints') + @default("checkpoints") def _default_checkpoints(self): return self.checkpoints_class(**self.checkpoints_kwargs) - @default('checkpoints_kwargs') + @default("checkpoints_kwargs") def _default_checkpoints_kwargs(self): return dict( parent=self, @@ -593,7 +612,7 @@ async def is_hidden(self, path): """ raise NotImplementedError - async def file_exists(self, path=''): + async def file_exists(self, path=""): """Does a file exist at the given path? Like os.path.isfile @@ -610,7 +629,7 @@ async def file_exists(self, path=''): exists : bool Whether the file exists. 
""" - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") async def exists(self, path): """Does a file or directory exist at the given path? @@ -627,11 +646,13 @@ async def exists(self, path): exists : bool Whether the target exists. """ - return await ensure_async(self.file_exists(path)) or await ensure_async(self.dir_exists(path)) + return await ensure_async(self.file_exists(path)) or await ensure_async( + self.dir_exists(path) + ) async def get(self, path, content=True, type=None, format=None): """Get a file or directory model.""" - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") async def save(self, model, path): """ @@ -641,22 +662,22 @@ async def save(self, model, path): should call self.run_pre_save_hook(model=model, path=path) prior to writing any data. """ - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") async def delete_file(self, path): """Delete the file or directory at path.""" - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") async def rename_file(self, old_path, new_path): """Rename a file or directory.""" - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") # ContentsManager API part 2: methods that have useable default # implementations, but can be overridden in subclasses. async def delete(self, path): """Delete a file/directory and any associated checkpoints.""" - path = path.strip('/') + path = path.strip("/") if not path: raise HTTPError(400, "Can't delete root") @@ -674,14 +695,14 @@ async def update(self, model, path): For use in PATCH requests, to enable renaming a file without re-uploading its contents. Only used for renaming at the moment. """ - path = path.strip('/') - new_path = model.get('path', path).strip('/') + path = path.strip("/") + new_path = model.get("path", path).strip("/") if path != new_path: await self.rename(path, new_path) model = await self.get(new_path, content=False) return model - async def increment_filename(self, filename, path='', insert=''): + async def increment_filename(self, filename, path="", insert=""): """Increment a filename until it is unique. Parameters @@ -699,26 +720,27 @@ async def increment_filename(self, filename, path='', insert=''): A filename that is unique, based on the input filename. """ # Extract the full suffix from the filename (e.g. 
.tar.gz) - path = path.strip('/') - basename, dot, ext = filename.rpartition('.') - if ext != 'ipynb': - basename, dot, ext = filename.partition('.') + path = path.strip("/") + basename, dot, ext = filename.rpartition(".") + if ext != "ipynb": + basename, dot, ext = filename.partition(".") suffix = dot + ext for i in itertools.count(): if i: - insert_i = '{}{}'.format(insert, i) + insert_i = "{}{}".format(insert, i) else: - insert_i = '' - name = u'{basename}{insert}{suffix}'.format(basename=basename, - insert=insert_i, suffix=suffix) - file_exists = await ensure_async(self.exists(u'{}/{}'.format(path, name))) + insert_i = "" + name = u"{basename}{insert}{suffix}".format( + basename=basename, insert=insert_i, suffix=suffix + ) + file_exists = await ensure_async(self.exists(u"{}/{}".format(path, name))) if not file_exists: break return name - async def new_untitled(self, path='', type='', ext=''): + async def new_untitled(self, path="", type="", ext=""): """Create a new untitled file or directory in path path must be a directory @@ -727,59 +749,59 @@ async def new_untitled(self, path='', type='', ext=''): Use `new` to create files with a fully specified path (including filename). """ - path = path.strip('/') + path = path.strip("/") dir_exists = await ensure_async(self.dir_exists(path)) if not dir_exists: - raise HTTPError(404, 'No such directory: %s' % path) + raise HTTPError(404, "No such directory: %s" % path) model = {} if type: - model['type'] = type + model["type"] = type - if ext == '.ipynb': - model.setdefault('type', 'notebook') + if ext == ".ipynb": + model.setdefault("type", "notebook") else: - model.setdefault('type', 'file') + model.setdefault("type", "file") - insert = '' - if model['type'] == 'directory': + insert = "" + if model["type"] == "directory": untitled = self.untitled_directory - insert = ' ' - elif model['type'] == 'notebook': + insert = " " + elif model["type"] == "notebook": untitled = self.untitled_notebook - ext = '.ipynb' - elif model['type'] == 'file': + ext = ".ipynb" + elif model["type"] == "file": untitled = self.untitled_file else: - raise HTTPError(400, "Unexpected model type: %r" % model['type']) + raise HTTPError(400, "Unexpected model type: %r" % model["type"]) name = await self.increment_filename(untitled + ext, path, insert=insert) - path = u'{0}/{1}'.format(path, name) + path = u"{0}/{1}".format(path, name) return await self.new(model, path) - async def new(self, model=None, path=''): + async def new(self, model=None, path=""): """Create a new file or directory and return its model with no content. To create a new untitled entity in a directory, use `new_untitled`. 
""" - path = path.strip('/') + path = path.strip("/") if model is None: model = {} - if path.endswith('.ipynb'): - model.setdefault('type', 'notebook') + if path.endswith(".ipynb"): + model.setdefault("type", "notebook") else: - model.setdefault('type', 'file') + model.setdefault("type", "file") # no content, not a directory, so fill out new-file model - if 'content' not in model and model['type'] != 'directory': - if model['type'] == 'notebook': - model['content'] = new_notebook() - model['format'] = 'json' + if "content" not in model and model["type"] != "directory": + if model["type"] == "notebook": + model["content"] = new_notebook() + model["format"] = "json" else: - model['content'] = '' - model['type'] = 'file' - model['format'] = 'text' + model["content"] = "" + model["type"] = "file" + model["format"] = "text" model = await self.save(model, path) return model @@ -794,27 +816,27 @@ async def copy(self, from_path, to_path=None): from_path must be a full path to a file. """ - path = from_path.strip('/') + path = from_path.strip("/") if to_path is not None: - to_path = to_path.strip('/') + to_path = to_path.strip("/") - if '/' in path: - from_dir, from_name = path.rsplit('/', 1) + if "/" in path: + from_dir, from_name = path.rsplit("/", 1) else: - from_dir = '' + from_dir = "" from_name = path model = await self.get(path) - model.pop('path', None) - model.pop('name', None) - if model['type'] == 'directory': + model.pop("path", None) + model.pop("name", None) + if model["type"] == "directory": raise HTTPError(400, "Can't copy directories") if to_path is None: to_path = from_dir if await ensure_async(self.dir_exists(to_path)): - name = copy_pat.sub(u'.', from_name) - to_name = await self.increment_filename(name, to_path, insert='-Copy') - to_path = u'{0}/{1}'.format(to_path, to_name) + name = copy_pat.sub(u".", from_name) + to_name = await self.increment_filename(name, to_path, insert="-Copy") + to_path = u"{0}/{1}".format(to_path, to_name) model = await self.save(model, to_path) return model @@ -828,7 +850,7 @@ async def trust_notebook(self, path): The path of a notebook """ model = await self.get(path) - nb = model['content'] + nb = model["content"] self.log.warning("Trusting notebook %s", path) self.notary.mark_cells(nb, True) self.check_and_sign(nb, path) diff --git a/jupyter_server/services/kernels/handlers.py b/jupyter_server/services/kernels/handlers.py index b83046d858..efc6bbd6de 100644 --- a/jupyter_server/services/kernels/handlers.py +++ b/jupyter_server/services/kernels/handlers.py @@ -2,30 +2,29 @@ Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-16%3A-Notebook-multi-directory-dashboard-and-URL-mapping#kernels-api """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- import json import logging from textwrap import dedent -from tornado import web, gen -from tornado.concurrent import Future -from tornado.ioloop import IOLoop - +from ipython_genutils.py3compat import cast_unicode from jupyter_client import protocol_version as client_protocol_version from jupyter_client.jsonutil import date_default -from ipython_genutils.py3compat import cast_unicode -from jupyter_server.utils import url_path_join, url_escape, ensure_async +from tornado import gen +from tornado import web +from tornado.concurrent import Future +from tornado.ioloop import IOLoop from ...base.handlers import APIHandler -from ...base.zmqhandlers import AuthenticatedZMQStreamHandler, deserialize_binary_message - +from ...base.zmqhandlers import AuthenticatedZMQStreamHandler +from ...base.zmqhandlers import deserialize_binary_message +from jupyter_server.utils import ensure_async +from jupyter_server.utils import url_escape +from jupyter_server.utils import url_path_join class MainKernelHandler(APIHandler): - @web.authenticated async def get(self): km = self.kernel_manager @@ -37,23 +36,19 @@ async def post(self): km = self.kernel_manager model = self.get_json_body() if model is None: - model = { - 'name': km.default_kernel_name - } + model = {"name": km.default_kernel_name} else: - model.setdefault('name', km.default_kernel_name) + model.setdefault("name", km.default_kernel_name) - kernel_id = await km.start_kernel(kernel_name=model['name'], - path=model.get('path')) + kernel_id = await km.start_kernel(kernel_name=model["name"], path=model.get("path")) model = await ensure_async(km.kernel_model(kernel_id)) - location = url_path_join(self.base_url, 'api', 'kernels', url_escape(kernel_id)) - self.set_header('Location', location) + location = url_path_join(self.base_url, "api", "kernels", url_escape(kernel_id)) + self.set_header("Location", location) self.set_status(201) self.finish(json.dumps(model, default=date_default)) class KernelHandler(APIHandler): - @web.authenticated async def get(self, kernel_id): km = self.kernel_manager @@ -69,14 +64,13 @@ async def delete(self, kernel_id): class KernelActionHandler(APIHandler): - @web.authenticated async def post(self, kernel_id, action): km = self.kernel_manager - if action == 'interrupt': + if action == "interrupt": await ensure_async(km.interrupt_kernel(kernel_id)) self.set_status(204) - if action == 'restart': + if action == "restart": try: await km.restart_kernel(kernel_id) @@ -90,9 +84,9 @@ async def post(self, kernel_id, action): class ZMQChannelsHandler(AuthenticatedZMQStreamHandler): - '''There is one ZMQChannelsHandler per running kernel and it oversees all + """There is one ZMQChannelsHandler per running kernel and it oversees all the sessions. 
- ''' + """ # class-level registry of open sessions # allows checking for conflict on session-id, @@ -102,28 +96,28 @@ class ZMQChannelsHandler(AuthenticatedZMQStreamHandler): @property def kernel_info_timeout(self): km_default = self.kernel_manager.kernel_info_timeout - return self.settings.get('kernel_info_timeout', km_default) + return self.settings.get("kernel_info_timeout", km_default) @property def iopub_msg_rate_limit(self): - return self.settings.get('iopub_msg_rate_limit', 0) + return self.settings.get("iopub_msg_rate_limit", 0) @property def iopub_data_rate_limit(self): - return self.settings.get('iopub_data_rate_limit', 0) + return self.settings.get("iopub_data_rate_limit", 0) @property def rate_limit_window(self): - return self.settings.get('rate_limit_window', 1.0) + return self.settings.get("rate_limit_window", 1.0) def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, getattr(self, 'kernel_id', 'uninitialized')) + return "%s(%s)" % (self.__class__.__name__, getattr(self, "kernel_id", "uninitialized")) def create_stream(self): km = self.kernel_manager identity = self.session.bsession - for channel in ('iopub', 'shell', 'control', 'stdin'): - meth = getattr(km, 'connect_' + channel) + for channel in ("iopub", "shell", "control", "stdin"): + meth = getattr(km, "connect_" + channel) self.channels[channel] = stream = meth(self.kernel_id, identity=identity) stream.channel = channel @@ -201,25 +195,19 @@ def nudge(count): # NOTE: this close check appears to never be True during on_open, # even when the peer has closed the connection if self.ws_connection is None or self.ws_connection.is_closing(): - self.log.debug( - "Nudge: cancelling on closed websocket: %s", self.kernel_id - ) + self.log.debug("Nudge: cancelling on closed websocket: %s", self.kernel_id) finish() return # check for stopped kernel if self.kernel_id not in self.kernel_manager: - self.log.debug( - "Nudge: cancelling on stopped kernel: %s", self.kernel_id - ) + self.log.debug("Nudge: cancelling on stopped kernel: %s", self.kernel_id) finish() return # check for closed zmq socket if shell_channel.closed(): - self.log.debug( - "Nudge: cancelling on closed zmq socket: %s", self.kernel_id - ) + self.log.debug("Nudge: cancelling on closed zmq socket: %s", self.kernel_id) finish() return @@ -266,7 +254,7 @@ def _handle_kernel_info_reply(self, msg): enabling msg spec adaptation, if necessary """ - idents,msg = self.session.feed_identities(msg) + idents, msg = self.session.feed_identities(msg) try: msg = self.session.deserialize(msg) except: @@ -274,9 +262,9 @@ def _handle_kernel_info_reply(self, msg): self._kernel_info_future.set_result({}) return else: - info = msg['content'] + info = msg["content"] self.log.debug("Received kernel info: %s", info) - if msg['msg_type'] != 'kernel_info_reply' or 'protocol_version' not in info: + if msg["msg_type"] != "kernel_info_reply" or "protocol_version" not in info: self.log.error("Kernel info request failed, assuming current %s", info) info = {} self._finish_kernel_info(info) @@ -292,10 +280,16 @@ def _finish_kernel_info(self, info): Set up protocol adaptation, if needed, and signal that connection can continue. 
""" - protocol_version = info.get('protocol_version', client_protocol_version) + protocol_version = info.get("protocol_version", client_protocol_version) if protocol_version != client_protocol_version: - self.session.adapt_version = int(protocol_version.split('.')[0]) - self.log.info("Adapting from protocol version {protocol_version} (kernel {kernel_id}) to {client_protocol_version} (client).".format(protocol_version=protocol_version, kernel_id=self.kernel_id, client_protocol_version=client_protocol_version)) + self.session.adapt_version = int(protocol_version.split(".")[0]) + self.log.info( + "Adapting from protocol version {protocol_version} (kernel {kernel_id}) to {client_protocol_version} (client).".format( + protocol_version=protocol_version, + kernel_id=self.kernel_id, + client_protocol_version=client_protocol_version, + ) + ) if not self._kernel_info_future.done(): self._kernel_info_future.set_result(info) @@ -307,7 +301,7 @@ def initialize(self): self.kernel_info_channel = None self._kernel_info_future = Future() self._close_future = Future() - self.session_key = '' + self.session_key = "" # Rate limiting code self._iopub_window_msg_count = 0 @@ -337,13 +331,14 @@ def give_up(): return self.log.warning("Timeout waiting for kernel_info reply from %s", self.kernel_id) future.set_result({}) + loop = IOLoop.current() loop.add_timeout(loop.time() + self.kernel_info_timeout, give_up) # actually wait for it await future async def get(self, kernel_id): - self.kernel_id = cast_unicode(kernel_id, 'ascii') + self.kernel_id = cast_unicode(kernel_id, "ascii") await super(ZMQChannelsHandler, self).get(kernel_id=kernel_id) async def _register_session(self): @@ -353,7 +348,7 @@ async def _register_session(self): This is likely due to a client reconnecting from a lost network connection, where the socket on our side has not been cleaned up yet. 
""" - self.session_key = '%s:%s' % (self.kernel_id, self.session.session) + self.session_key = "%s:%s" % (self.kernel_id, self.session.session) stale_handler = self._open_sessions.get(self.session_key) if stale_handler: self.log.warning("Replacing stale connection: %s", self.session_key) @@ -367,7 +362,7 @@ def open(self, kernel_id): # on new connections, flush the message buffer buffer_info = km.get_buffer(kernel_id, self.session_key) - if buffer_info and buffer_info['session_key'] == self.session_key: + if buffer_info and buffer_info["session_key"] == self.session_key: self.log.info("Restoring connection for %s", self.session_key) if km.ports_changed(kernel_id): # If the kernel's ports have changed (some restarts trigger this) @@ -375,12 +370,12 @@ def open(self, kernel_id): self.create_stream() else: # The kernel's ports have not changed; use the channels captured in the buffer - self.channels = buffer_info['channels'] + self.channels = buffer_info["channels"] connected = self.nudge() def replay(value): - replay_buffer = buffer_info['buffer'] + replay_buffer = buffer_info["buffer"] if replay_buffer: self.log.info("Replaying %s buffered messages", len(replay_buffer)) for channel, msg_list in replay_buffer: @@ -403,7 +398,7 @@ def replay(value): return km.add_restart_callback(self.kernel_id, self.on_kernel_restarted) - km.add_restart_callback(self.kernel_id, self.on_restart_failed, 'dead') + km.add_restart_callback(self.kernel_id, self.on_restart_failed, "dead") def subscribe(value): for channel, stream in self.channels.items(): @@ -413,7 +408,6 @@ def subscribe(value): return connected - def on_message(self, msg): if not self.channels: # already closed, ignore the message @@ -423,15 +417,15 @@ def on_message(self, msg): msg = deserialize_binary_message(msg) else: msg = json.loads(msg) - channel = msg.pop('channel', None) + channel = msg.pop("channel", None) if channel is None: self.log.warning("No channel specified, assuming shell: %s", msg) - channel = 'shell' + channel = "shell" if channel not in self.channels: self.log.warning("No such channel: %r", channel) return am = self.kernel_manager.allowed_message_types - mt = msg['header']['msg_type'] + mt = msg["header"]["msg_type"] if am and mt not in am: self.log.warning('Received message of type "%s", which is not allowed. Ignoring.' 
% mt) else: @@ -441,23 +435,27 @@ def on_message(self, msg): def _on_zmq_reply(self, stream, msg_list): idents, fed_msg_list = self.session.feed_identities(msg_list) msg = self.session.deserialize(fed_msg_list) - parent = msg['parent_header'] + parent = msg["parent_header"] + def write_stderr(error_message): self.log.warning(error_message) - msg = self.session.msg("stream", - content={"text": error_message + '\n', "name": "stderr"}, - parent=parent + msg = self.session.msg( + "stream", content={"text": error_message + "\n", "name": "stderr"}, parent=parent ) - msg['channel'] = 'iopub' + msg["channel"] = "iopub" self.write_message(json.dumps(msg, default=date_default)) - channel = getattr(stream, 'channel', None) - msg_type = msg['header']['msg_type'] - if channel == 'iopub' and msg_type == 'error': - self._on_error(msg) + channel = getattr(stream, "channel", None) + msg_type = msg["header"]["msg_type"] + if channel == "iopub" and msg_type == "error": + self._on_error(msg) - if channel == 'iopub' and msg_type == 'status' and msg['content'].get('execution_state') == 'idle': + if ( + channel == "iopub" + and msg_type == "status" + and msg["content"].get("execution_state") == "idle" + ): # reset rate limit counter on status=idle, # to avoid 'Run All' hitting limits prematurely. self._iopub_window_byte_queue = [] @@ -466,13 +464,13 @@ def write_stderr(error_message): self._iopub_msgs_exceeded = False self._iopub_data_exceeded = False - if channel == 'iopub' and msg_type not in {'status', 'comm_open', 'execute_input'}: + if channel == "iopub" and msg_type not in {"status", "comm_open", "execute_input"}: # Remove the counts queued for removal. now = IOLoop.current().time() while len(self._iopub_window_byte_queue) > 0: queued = self._iopub_window_byte_queue[0] - if (now >= queued[0]): + if now >= queued[0]: self._iopub_window_byte_count -= queued[1] self._iopub_window_msg_count -= 1 del self._iopub_window_byte_queue[0] @@ -483,7 +481,7 @@ def write_stderr(error_message): # Increment the bytes and message count self._iopub_window_msg_count += 1 - if msg_type == 'stream': + if msg_type == "stream": byte_count = sum([len(x) for x in msg_list]) else: byte_count = 0 @@ -502,7 +500,9 @@ def write_stderr(error_message): if self.iopub_msg_rate_limit > 0 and msg_rate > self.iopub_msg_rate_limit: if not self._iopub_msgs_exceeded: self._iopub_msgs_exceeded = True - write_stderr(dedent("""\ + write_stderr( + dedent( + """\ IOPub message rate exceeded. The Jupyter server will temporarily stop sending output to the client in order to avoid crashing it. @@ -512,7 +512,11 @@ def write_stderr(error_message): Current values: ServerApp.iopub_msg_rate_limit={} (msgs/sec) ServerApp.rate_limit_window={} (secs) - """.format(self.iopub_msg_rate_limit, self.rate_limit_window))) + """.format( + self.iopub_msg_rate_limit, self.rate_limit_window + ) + ) + ) else: # resume once we've got some headroom below the limit if self._iopub_msgs_exceeded and msg_rate < (0.8 * self.iopub_msg_rate_limit): @@ -524,7 +528,9 @@ def write_stderr(error_message): if self.iopub_data_rate_limit > 0 and data_rate > self.iopub_data_rate_limit: if not self._iopub_data_exceeded: self._iopub_data_exceeded = True - write_stderr(dedent("""\ + write_stderr( + dedent( + """\ IOPub data rate exceeded. The Jupyter server will temporarily stop sending output to the client in order to avoid crashing it. 
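The rate-limiting hunks above and below are reflowed without behavior change: every forwarded IOPub message bumps _iopub_window_msg_count, "stream" messages additionally add their byte size, each increment is queued to expire rate_limit_window seconds later, and forwarding stops while count / window exceeds iopub_msg_rate_limit or iopub_data_rate_limit. A standalone sketch of that sliding window under those assumptions (class and attribute names here are illustrative, not the handler's own):

import time


class IOPubRateLimiter:
    """Sliding-window limiter mirroring the counters used in _on_zmq_reply."""

    def __init__(self, msg_rate_limit, data_rate_limit, window=1.0):
        self.msg_rate_limit = msg_rate_limit    # msgs/sec; 0 disables
        self.data_rate_limit = data_rate_limit  # bytes/sec; 0 disables
        self.window = window                    # seconds
        self._msg_count = 0
        self._byte_count = 0
        self._queue = []  # (expiry_time, byte_count) pairs, oldest first

    def allow(self, byte_count):
        """Return True if a message of byte_count bytes may be forwarded now."""
        now = time.monotonic()
        # Retire counts whose window has expired before measuring the rate.
        while self._queue and now >= self._queue[0][0]:
            _, expired_bytes = self._queue.pop(0)
            self._byte_count -= expired_bytes
            self._msg_count -= 1
        self._msg_count += 1
        self._byte_count += byte_count
        self._queue.append((now + self.window, byte_count))
        if self.msg_rate_limit > 0 and self._msg_count / self.window > self.msg_rate_limit:
            return False
        if self.data_rate_limit > 0 and self._byte_count / self.window > self.data_rate_limit:
            return False
        return True

The real handler adds two refinements the sketch omits: the window is reset whenever a status message with execution_state "idle" arrives (so "Run All" does not trip the limits prematurely), and once a limit trips, output resumes only after the rate falls back below 0.8 times that limit.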
@@ -534,7 +540,11 @@ def write_stderr(error_message): Current values: ServerApp.iopub_data_rate_limit={} (bytes/sec) ServerApp.rate_limit_window={} (secs) - """.format(self.iopub_data_rate_limit, self.rate_limit_window))) + """.format( + self.iopub_data_rate_limit, self.rate_limit_window + ) + ) + ) else: # resume once we've got some headroom below the limit if self._iopub_data_exceeded and data_rate < (0.8 * self.iopub_data_rate_limit): @@ -565,10 +575,13 @@ def on_close(self): if self.kernel_id in km: km.notify_disconnect(self.kernel_id) km.remove_restart_callback( - self.kernel_id, self.on_kernel_restarted, + self.kernel_id, + self.on_kernel_restarted, ) km.remove_restart_callback( - self.kernel_id, self.on_restart_failed, 'dead', + self.kernel_id, + self.on_restart_failed, + "dead", ) # start buffering instead of closing if this was the last connection @@ -589,36 +602,35 @@ def on_close(self): self._close_future.set_result(None) def _send_status_message(self, status): - iopub = self.channels.get('iopub', None) + iopub = self.channels.get("iopub", None) if iopub and not iopub.closed(): # flush IOPub before sending a restarting/dead status message # ensures proper ordering on the IOPub channel # that all messages from the stopped kernel have been delivered iopub.flush() - msg = self.session.msg("status", - {'execution_state': status} - ) - msg['channel'] = 'iopub' + msg = self.session.msg("status", {"execution_state": status}) + msg["channel"] = "iopub" self.write_message(json.dumps(msg, default=date_default)) def on_kernel_restarted(self): logging.warn("kernel %s restarted", self.kernel_id) - self._send_status_message('restarting') + self._send_status_message("restarting") def on_restart_failed(self): logging.error("kernel %s restarted failed!", self.kernel_id) - self._send_status_message('dead') + self._send_status_message("dead") def _on_error(self, msg): if self.kernel_manager.allow_tracebacks: return - msg['content']['ename'] = 'ExecutionError' - msg['content']['evalue'] = 'Execution error' - msg['content']['traceback'] = [self.kernel_manager.traceback_replacement_message] + msg["content"]["ename"] = "ExecutionError" + msg["content"]["evalue"] = "Execution error" + msg["content"]["traceback"] = [self.kernel_manager.traceback_replacement_message] + -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # URL to handler mappings -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- _kernel_id_regex = r"(?P\w+-\w+-\w+-\w+-\w+)" diff --git a/jupyter_server/services/kernels/kernelmanager.py b/jupyter_server/services/kernels/kernelmanager.py index c00cce836d..02a1e4d346 100644 --- a/jupyter_server/services/kernels/kernelmanager.py +++ b/jupyter_server/services/kernels/kernelmanager.py @@ -3,40 +3,49 @@ - raises HTTPErrors - creates REST API models """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- +import os from collections import defaultdict -from datetime import datetime, timedelta +from datetime import datetime +from datetime import timedelta from functools import partial -import os - -from tornado import web -from tornado.concurrent import Future -from tornado.ioloop import IOLoop, PeriodicCallback +from jupyter_client.multikernelmanager import AsyncMultiKernelManager +from jupyter_client.multikernelmanager import MultiKernelManager from jupyter_client.session import Session -from jupyter_client.multikernelmanager import MultiKernelManager, AsyncMultiKernelManager from jupyter_core.paths import exists -from traitlets import (Any, Bool, Dict, List, Unicode, TraitError, Integer, - Float, Instance, default, validate -) - -from jupyter_server.utils import to_os_path, ensure_async -from jupyter_server._tz import utcnow, isoformat - +from tornado import web +from tornado.concurrent import Future +from tornado.ioloop import IOLoop +from tornado.ioloop import PeriodicCallback +from traitlets import Any +from traitlets import Bool +from traitlets import default +from traitlets import Dict +from traitlets import Float +from traitlets import Instance +from traitlets import Integer +from traitlets import List +from traitlets import TraitError +from traitlets import Unicode +from traitlets import validate + +from jupyter_server._tz import isoformat +from jupyter_server._tz import utcnow from jupyter_server.prometheus.metrics import KERNEL_CURRENTLY_RUNNING_TOTAL +from jupyter_server.utils import ensure_async +from jupyter_server.utils import to_os_path class MappingKernelManager(MultiKernelManager): """A KernelManager that handles - - File mapping - - HTTP error handling - - Kernel message filtering + - File mapping + - HTTP error handling + - Kernel message filtering """ - @default('kernel_manager_class') + @default("kernel_manager_class") def _default_kernel_manager_class(self): return "jupyter_client.ioloop.IOLoopKernelManager" @@ -52,17 +61,17 @@ def _default_kernel_manager_class(self): _initialized_culler = False - @default('root_dir') + @default("root_dir") def _default_root_dir(self): try: return self.parent.root_dir except AttributeError: return os.getcwd() - @validate('root_dir') + @validate("root_dir") def _update_root_dir(self, proposal): """Do a bit of validation of the root dir.""" - value = proposal['value'] + value = proposal["value"] if not os.path.isabs(value): # If we receive a non-absolute path, make it absolute. value = os.path.abspath(value) @@ -70,28 +79,38 @@ def _update_root_dir(self, proposal): raise TraitError("kernel root dir %r is not a directory" % value) return value - cull_idle_timeout = Integer(0, config=True, + cull_idle_timeout = Integer( + 0, + config=True, help="""Timeout (in seconds) after which a kernel is considered idle and ready to be culled. Values of 0 or lower disable culling. 
Very short timeouts may result in kernels being culled - for users with poor network connections.""" + for users with poor network connections.""", ) - cull_interval_default = 300 # 5 minutes - cull_interval = Integer(cull_interval_default, config=True, - help="""The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value.""" + cull_interval_default = 300 # 5 minutes + cull_interval = Integer( + cull_interval_default, + config=True, + help="""The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value.""", ) - cull_connected = Bool(False, config=True, + cull_connected = Bool( + False, + config=True, help="""Whether to consider culling kernels which have one or more connections. - Only effective if cull_idle_timeout > 0.""" + Only effective if cull_idle_timeout > 0.""", ) - cull_busy = Bool(False, config=True, + cull_busy = Bool( + False, + config=True, help="""Whether to consider culling kernels which are busy. - Only effective if cull_idle_timeout > 0.""" + Only effective if cull_idle_timeout > 0.""", ) - buffer_offline_messages = Bool(True, config=True, + buffer_offline_messages = Bool( + True, + config=True, help="""Whether messages from kernels whose frontends have disconnected should be buffered in-memory. When True (default), messages are buffered and replayed on reconnect, @@ -99,10 +118,12 @@ def _update_root_dir(self, proposal): Disable if long-running kernels will produce too much output while no frontends are connected. - """ + """, ) - kernel_info_timeout = Float(60, config=True, + kernel_info_timeout = Float( + 60, + config=True, help="""Timeout for giving up on a kernel (in seconds). On starting and restarting kernels, we check whether the @@ -111,43 +132,45 @@ def _update_root_dir(self, proposal): before being presumed dead. This affects the MappingKernelManager (which handles kernel restarts) and the ZMQChannelsHandler (which handles the startup). - """ + """, ) _kernel_buffers = Any() - @default('_kernel_buffers') + + @default("_kernel_buffers") def _default_kernel_buffers(self): - return defaultdict(lambda: {'buffer': [], 'session_key': '', 'channels': {}}) + return defaultdict(lambda: {"buffer": [], "session_key": "", "channels": {}}) - last_kernel_activity = Instance(datetime, - help="The last activity on any kernel, including shutting down a kernel") + last_kernel_activity = Instance( + datetime, help="The last activity on any kernel, including shutting down a kernel" + ) def __init__(self, **kwargs): self.pinned_superclass = MultiKernelManager self.pinned_superclass.__init__(self, **kwargs) self.last_kernel_activity = utcnow() - allowed_message_types = List(trait=Unicode(), config=True, + allowed_message_types = List( + trait=Unicode(), + config=True, help="""White list of allowed kernel message types. When the list is empty, all message types are allowed. - """ + """, ) - allow_tracebacks = Bool(True, config=True, help=( - 'Whether to send tracebacks to clients on exceptions.' 
- )) + allow_tracebacks = Bool( + True, config=True, help=("Whether to send tracebacks to clients on exceptions.") + ) traceback_replacement_message = Unicode( - 'An exception occurred at runtime, which is not shown due to security reasons.', + "An exception occurred at runtime, which is not shown due to security reasons.", config=True, - help=( - 'Message to print when allow_tracebacks is False, and an exception occurs' - ) + help=("Message to print when allow_tracebacks is False, and an exception occurs"), ) - #------------------------------------------------------------------------- + # ------------------------------------------------------------------------- # Methods for managing kernels and sessions - #------------------------------------------------------------------------- + # ------------------------------------------------------------------------- def _handle_kernel_died(self, kernel_id): """notice that a kernel died""" @@ -181,9 +204,9 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): """ if kernel_id is None or kernel_id not in self: if path is not None: - kwargs['cwd'] = self.cwd_for_path(path) + kwargs["cwd"] = self.cwd_for_path(path) if kernel_id is not None: - kwargs['kernel_id'] = kernel_id + kwargs["kernel_id"] = kernel_id kernel_id = await ensure_async(self.pinned_superclass.start_kernel(self, **kwargs)) self._kernel_connections[kernel_id] = 0 self._kernel_ports[kernel_id] = self._kernels[kernel_id].ports @@ -191,16 +214,15 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): self.log.info("Kernel started: %s" % kernel_id) self.log.debug("Kernel args: %r" % kwargs) # register callback for failed auto-restart - self.add_restart_callback(kernel_id, - lambda : self._handle_kernel_died(kernel_id), - 'dead', + self.add_restart_callback( + kernel_id, + lambda: self._handle_kernel_died(kernel_id), + "dead", ) # Increase the metric of number of kernels running # for the relevant kernel type by 1 - KERNEL_CURRENTLY_RUNNING_TOTAL.labels( - type=self._kernels[kernel_id].kernel_name - ).inc() + KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).inc() else: self.log.info("Using existing kernel: %s" % kernel_id) @@ -271,15 +293,15 @@ def start_buffering(self, kernel_id, session_key, channels): self.stop_buffering(kernel_id) buffer_info = self._kernel_buffers[kernel_id] # record the session key because only one session can buffer - buffer_info['session_key'] = session_key + buffer_info["session_key"] = session_key # TODO: the buffer should likely be a memory bounded queue, we're starting with a list to keep it simple - buffer_info['buffer'] = [] - buffer_info['channels'] = channels + buffer_info["buffer"] = [] + buffer_info["channels"] = channels # forward any future messages to the internal buffer def buffer_msg(channel, msg_parts): self.log.debug("Buffering msg on %s:%s", kernel_id, channel) - buffer_info['buffer'].append((channel, msg_parts)) + buffer_info["buffer"].append((channel, msg_parts)) for channel, stream in channels.items(): stream.on_recv(partial(buffer_msg, channel)) @@ -301,7 +323,7 @@ def get_buffer(self, kernel_id, session_key): return buffer_info = self._kernel_buffers[kernel_id] - if buffer_info['session_key'] == session_key: + if buffer_info["session_key"] == session_key: # remove buffer self._kernel_buffers.pop(kernel_id) # only return buffer_info if it's a match @@ -324,15 +346,18 @@ def stop_buffering(self, kernel_id): return buffer_info = self._kernel_buffers.pop(kernel_id) # close buffering 
streams - for stream in buffer_info['channels'].values(): + for stream in buffer_info["channels"].values(): if not stream.closed(): stream.on_recv(None) stream.close() - msg_buffer = buffer_info['buffer'] + msg_buffer = buffer_info["buffer"] if msg_buffer: - self.log.info("Discarding %s buffered messages for %s", - len(msg_buffer), buffer_info['session_key']) + self.log.info( + "Discarding %s buffered messages for %s", + len(msg_buffer), + buffer_info["session_key"], + ) def shutdown_kernel(self, kernel_id, now=False, restart=False): """Shutdown a kernel by kernel_id""" @@ -343,9 +368,7 @@ def shutdown_kernel(self, kernel_id, now=False, restart=False): # Decrease the metric of number of kernels # running for the relevant kernel type by 1 - KERNEL_CURRENTLY_RUNNING_TOTAL.labels( - type=self._kernels[kernel_id].kernel_name - ).dec() + KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec() self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart) # Unlike its async sibling method in AsyncMappingKernelManager, removing the kernel_id @@ -369,7 +392,7 @@ def finish(): if not channel.closed(): channel.close() loop.remove_timeout(timeout) - kernel.remove_restart_callback(on_restart_failed, 'dead') + kernel.remove_restart_callback(on_restart_failed, "dead") def on_reply(msg): self.log.debug("Kernel info reply received: %s", kernel_id) @@ -389,7 +412,7 @@ def on_restart_failed(): if not future.done(): future.set_exception(RuntimeError("Restart failed")) - kernel.add_restart_callback(on_restart_failed, 'dead') + kernel.add_restart_callback(on_restart_failed, "dead") kernel.session.send(channel, "kernel_info_request") channel.on_recv(on_reply) loop = IOLoop.current() @@ -419,7 +442,7 @@ def kernel_model(self, kernel_id): kernel = self._kernels[kernel_id] model = { - "id":kernel_id, + "id": kernel_id, "name": kernel.kernel_name, "last_activity": isoformat(kernel.last_activity), "execution_state": kernel.execution_state, @@ -443,7 +466,7 @@ def list_kernels(self): def _check_kernel_id(self, kernel_id): """Check a that a kernel_id exists and raise 404 if not.""" if kernel_id not in self: - raise web.HTTPError(404, u'Kernel does not exist: %s' % kernel_id) + raise web.HTTPError(404, "Kernel does not exist: %s" % kernel_id) # monitoring activity: @@ -455,7 +478,7 @@ def start_watching_activity(self, kernel_id): """ kernel = self._kernels[kernel_id] # add busy/activity markers: - kernel.execution_state = 'starting' + kernel.execution_state = "starting" kernel.last_activity = utcnow() kernel._activity_stream = kernel.connect_iopub() session = Session( @@ -470,10 +493,12 @@ def record_activity(msg_list): idents, fed_msg_list = session.feed_identities(msg_list) msg = session.deserialize(fed_msg_list) - msg_type = msg['header']['msg_type'] - if msg_type == 'status': - kernel.execution_state = msg['content']['execution_state'] - self.log.debug("activity on %s: %s (%s)", kernel_id, msg_type, kernel.execution_state) + msg_type = msg["header"]["msg_type"] + if msg_type == "status": + kernel.execution_state = msg["content"]["execution_state"] + self.log.debug( + "activity on %s: %s (%s)", kernel_id, msg_type, kernel.execution_state + ) else: self.log.debug("activity on %s: %s", kernel_id, msg_type) @@ -494,14 +519,21 @@ def initialize_culler(self): if not self._initialized_culler and self.cull_idle_timeout > 0: if self._culler_callback is None: loop = IOLoop.current() - if self.cull_interval <= 0: #handle case where user set invalid value - 
self.log.warning("Invalid value for 'cull_interval' detected (%s) - using default value (%s).", - self.cull_interval, self.cull_interval_default) + if self.cull_interval <= 0: # handle case where user set invalid value + self.log.warning( + "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", + self.cull_interval, + self.cull_interval_default, + ) self.cull_interval = self.cull_interval_default self._culler_callback = PeriodicCallback( - self.cull_kernels, 1000*self.cull_interval) - self.log.info("Culling kernels with idle durations > %s seconds at %s second intervals ...", - self.cull_idle_timeout, self.cull_interval) + self.cull_kernels, 1000 * self.cull_interval + ) + self.log.info( + "Culling kernels with idle durations > %s seconds at %s second intervals ...", + self.cull_idle_timeout, + self.cull_interval, + ) if self.cull_busy: self.log.info("Culling kernels even if busy") if self.cull_connected: @@ -511,41 +543,58 @@ def initialize_culler(self): self._initialized_culler = True async def cull_kernels(self): - self.log.debug("Polling every %s seconds for kernels idle > %s seconds...", - self.cull_interval, self.cull_idle_timeout) + self.log.debug( + "Polling every %s seconds for kernels idle > %s seconds...", + self.cull_interval, + self.cull_idle_timeout, + ) """Create a separate list of kernels to avoid conflicting updates while iterating""" for kernel_id in list(self._kernels): try: await self.cull_kernel_if_idle(kernel_id) except Exception as e: - self.log.exception("The following exception was encountered while checking the idle duration of kernel %s: %s", - kernel_id, e) + self.log.exception( + "The following exception was encountered while checking the idle duration of kernel %s: %s", + kernel_id, + e, + ) async def cull_kernel_if_idle(self, kernel_id): kernel = self._kernels[kernel_id] - if hasattr(kernel, 'last_activity'): # last_activity is monkey-patched, so ensure that has occurred - self.log.debug("kernel_id=%s, kernel_name=%s, last_activity=%s", - kernel_id, kernel.kernel_name, kernel.last_activity) + if hasattr( + kernel, "last_activity" + ): # last_activity is monkey-patched, so ensure that has occurred + self.log.debug( + "kernel_id=%s, kernel_name=%s, last_activity=%s", + kernel_id, + kernel.kernel_name, + kernel.last_activity, + ) dt_now = utcnow() dt_idle = dt_now - kernel.last_activity # Compute idle properties is_idle_time = dt_idle > timedelta(seconds=self.cull_idle_timeout) - is_idle_execute = self.cull_busy or (kernel.execution_state != 'busy') + is_idle_execute = self.cull_busy or (kernel.execution_state != "busy") connections = self._kernel_connections.get(kernel_id, 0) is_idle_connected = self.cull_connected or not connections # Cull the kernel if all three criteria are met - if (is_idle_time and is_idle_execute and is_idle_connected): + if is_idle_time and is_idle_execute and is_idle_connected: idle_duration = int(dt_idle.total_seconds()) - self.log.warning("Culling '%s' kernel '%s' (%s) with %d connections due to %s seconds of inactivity.", - kernel.execution_state, kernel.kernel_name, kernel_id, connections, idle_duration) + self.log.warning( + "Culling '%s' kernel '%s' (%s) with %d connections due to %s seconds of inactivity.", + kernel.execution_state, + kernel.kernel_name, + kernel_id, + connections, + idle_duration, + ) await ensure_async(self.shutdown_kernel(kernel_id)) # AsyncMappingKernelManager inherits as much as possible from MappingKernelManager, # overriding only what is different. 
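A note on the culling code reformatted above: `cull_kernel_if_idle` only shuts a kernel down when three independent checks all pass. A minimal standalone sketch of that predicate (the function name and argument list are ours for illustration, not part of the diff):

```python
from datetime import datetime, timedelta, timezone

def should_cull(last_activity, execution_state, connections,
                idle_timeout, cull_busy=False, cull_connected=False):
    """Restate the three culling criteria from cull_kernel_if_idle."""
    # last_activity is assumed timezone-aware (utcnow() in the diff is).
    dt_idle = datetime.now(timezone.utc) - last_activity
    is_idle_time = dt_idle > timedelta(seconds=idle_timeout)
    is_idle_execute = cull_busy or (execution_state != "busy")
    is_idle_connected = cull_connected or not connections
    return is_idle_time and is_idle_execute and is_idle_connected
```

Setting `cull_busy` or `cull_connected` simply forces the corresponding check to pass, matching the `cull_busy or ...` / `cull_connected or ...` expressions in the hunk above.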
class AsyncMappingKernelManager(MappingKernelManager, AsyncMultiKernelManager): - - @default('kernel_manager_class') + @default("kernel_manager_class") def _default_kernel_manager_class(self): return "jupyter_client.ioloop.AsyncIOLoopKernelManager" @@ -562,12 +611,12 @@ async def shutdown_kernel(self, kernel_id, now=False, restart=False): # Decrease the metric of number of kernels # running for the relevant kernel type by 1 - KERNEL_CURRENTLY_RUNNING_TOTAL.labels( - type=self._kernels[kernel_id].kernel_name - ).dec() + KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec() # Finish shutting down the kernel before clearing state to avoid a race condition. - ret = await self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart) + ret = await self.pinned_superclass.shutdown_kernel( + self, kernel_id, now=now, restart=restart + ) self._kernel_connections.pop(kernel_id, None) self._kernel_ports.pop(kernel_id, None) return ret diff --git a/jupyter_server/services/kernelspecs/handlers.py b/jupyter_server/services/kernelspecs/handlers.py index 310caec99f..1ac353ba62 100644 --- a/jupyter_server/services/kernelspecs/handlers.py +++ b/jupyter_server/services/kernelspecs/handlers.py @@ -2,13 +2,12 @@ Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-25%3A-Registry-of-installed-kernels#rest-api """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - import glob import json import os + pjoin = os.path.join from tornado import web @@ -17,68 +16,60 @@ from ...utils import ensure_async, url_path_join, url_unescape - def kernelspec_model(handler, name, spec_dict, resource_dir): """Load a KernelSpec by name and return the REST API model""" - d = { - 'name': name, - 'spec': spec_dict, - 'resources': {} - } + d = {"name": name, "spec": spec_dict, "resources": {}} # Add resource files if they exist resource_dir = resource_dir - for resource in ['kernel.js', 'kernel.css']: + for resource in ["kernel.js", "kernel.css"]: if os.path.exists(pjoin(resource_dir, resource)): - d['resources'][resource] = url_path_join( - handler.base_url, - 'kernelspecs', - name, - resource + d["resources"][resource] = url_path_join( + handler.base_url, "kernelspecs", name, resource ) - for logo_file in glob.glob(pjoin(resource_dir, 'logo-*')): + for logo_file in glob.glob(pjoin(resource_dir, "logo-*")): fname = os.path.basename(logo_file) no_ext, _ = os.path.splitext(fname) - d['resources'][no_ext] = url_path_join( - handler.base_url, - 'kernelspecs', - name, - fname - ) + d["resources"][no_ext] = url_path_join(handler.base_url, "kernelspecs", name, fname) return d def is_kernelspec_model(spec_dict): """Returns True if spec_dict is already in proper form. 
This will occur when using a gateway.""" - return isinstance(spec_dict, dict) and 'name' in spec_dict and 'spec' in spec_dict and 'resources' in spec_dict + return ( + isinstance(spec_dict, dict) + and "name" in spec_dict + and "spec" in spec_dict + and "resources" in spec_dict + ) class MainKernelSpecHandler(APIHandler): - @web.authenticated async def get(self): ksm = self.kernel_spec_manager km = self.kernel_manager model = {} - model['default'] = km.default_kernel_name - model['kernelspecs'] = specs = {} + model["default"] = km.default_kernel_name + model["kernelspecs"] = specs = {} kspecs = await ensure_async(ksm.get_all_specs()) for kernel_name, kernel_info in kspecs.items(): try: if is_kernelspec_model(kernel_info): d = kernel_info else: - d = kernelspec_model(self, kernel_name, kernel_info['spec'], kernel_info['resource_dir']) + d = kernelspec_model( + self, kernel_name, kernel_info["spec"], kernel_info["resource_dir"] + ) except Exception: self.log.error("Failed to load kernel spec: '%s'", kernel_name, exc_info=True) continue specs[kernel_name] = d - self.set_header("Content-Type", 'application/json') + self.set_header("Content-Type", "application/json") self.finish(json.dumps(model)) class KernelSpecHandler(APIHandler): - @web.authenticated async def get(self, kernel_name): ksm = self.kernel_spec_manager @@ -86,12 +77,12 @@ async def get(self, kernel_name): try: spec = await ensure_async(ksm.get_kernel_spec(kernel_name)) except KeyError as e: - raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name) from e + raise web.HTTPError(404, u"Kernel spec %s not found" % kernel_name) from e if is_kernelspec_model(spec): model = spec else: model = kernelspec_model(self, kernel_name, spec.to_dict(), spec.resource_dir) - self.set_header("Content-Type", 'application/json') + self.set_header("Content-Type", "application/json") self.finish(json.dumps(model)) diff --git a/jupyter_server/services/nbconvert/handlers.py b/jupyter_server/services/nbconvert/handlers.py index e6d99e3bd4..67f1c4afb9 100644 --- a/jupyter_server/services/nbconvert/handlers.py +++ b/jupyter_server/services/nbconvert/handlers.py @@ -1,5 +1,5 @@ -import json import asyncio +import json from anyio.to_thread import run_sync from tornado import web @@ -9,8 +9,8 @@ LOCK = asyncio.Lock() -class NbconvertRootHandler(APIHandler): +class NbconvertRootHandler(APIHandler): @web.authenticated async def get(self): try: @@ -40,6 +40,7 @@ async def get(self): self.finish(json.dumps(res)) + default_handlers = [ (r"/api/nbconvert", NbconvertRootHandler), ] diff --git a/jupyter_server/services/security/handlers.py b/jupyter_server/services/security/handlers.py index 82a00d234b..91cd9a4834 100644 --- a/jupyter_server/services/security/handlers.py +++ b/jupyter_server/services/security/handlers.py @@ -1,15 +1,14 @@ """Tornado handlers for security logging.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - from tornado import web -from ...base.handlers import APIHandler from . 
import csp_report_uri +from ...base.handlers import APIHandler + class CSPReportHandler(APIHandler): - '''Accepts a content security policy violation report''' + """Accepts a content security policy violation report""" _track_activity = False @@ -23,10 +22,10 @@ def check_xsrf_cookie(self): @web.authenticated def post(self): - '''Log a content security policy violation report''' - self.log.warning("Content security violation: %s", - self.request.body.decode('utf8', 'replace')) + """Log a content security policy violation report""" + self.log.warning( + "Content security violation: %s", self.request.body.decode("utf8", "replace") + ) + -default_handlers = [ - (csp_report_uri, CSPReportHandler) -] +default_handlers = [(csp_report_uri, CSPReportHandler)] diff --git a/jupyter_server/services/sessions/handlers.py b/jupyter_server/services/sessions/handlers.py index 6eb8bb4d31..07230cfb8d 100644 --- a/jupyter_server/services/sessions/handlers.py +++ b/jupyter_server/services/sessions/handlers.py @@ -2,22 +2,20 @@ Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-16%3A-Notebook-multi-directory-dashboard-and-URL-mapping#sessions-api """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - import json +from jupyter_client.jsonutil import date_default +from jupyter_client.kernelspec import NoSuchKernel from tornado import web from ...base.handlers import APIHandler -from jupyter_client.jsonutil import date_default -from jupyter_server.utils import url_path_join, ensure_async -from jupyter_client.kernelspec import NoSuchKernel +from jupyter_server.utils import ensure_async +from jupyter_server.utils import url_path_join class SessionRootHandler(APIHandler): - @web.authenticated async def get(self): # Return a list of running sessions @@ -28,32 +26,32 @@ async def get(self): @web.authenticated async def post(self): # Creates a new session - #(unless a session already exists for the named session) + # (unless a session already exists for the named session) sm = self.session_manager model = self.get_json_body() if model is None: raise web.HTTPError(400, "No JSON data provided") - if 'notebook' in model and 'path' in model['notebook']: - self.log.warning('Sessions API changed, see updated swagger docs') - model['path'] = model['notebook']['path'] - model['type'] = 'notebook' + if "notebook" in model and "path" in model["notebook"]: + self.log.warning("Sessions API changed, see updated swagger docs") + model["path"] = model["notebook"]["path"] + model["type"] = "notebook" try: - path = model['path'] + path = model["path"] except KeyError as e: raise web.HTTPError(400, "Missing field in JSON data: path") from e try: - mtype = model['type'] + mtype = model["type"] except KeyError as e: raise web.HTTPError(400, "Missing field in JSON data: type") from e - name = model.get('name', None) - kernel = model.get('kernel', {}) - kernel_name = kernel.get('name', None) - kernel_id = kernel.get('id', None) + name = model.get("name", None) + kernel = model.get("kernel", {}) + kernel_name = kernel.get("name", None) + kernel_id = kernel.get("id", None) if not kernel_id and not kernel_name: self.log.debug("No kernel specified, using default kernel") @@ -65,26 +63,26 @@ async def post(self): else: try: model = await sm.create_session( - path=path, kernel_name=kernel_name, - kernel_id=kernel_id, name=name, - type=mtype) + path=path, kernel_name=kernel_name, kernel_id=kernel_id, name=name, type=mtype + ) except NoSuchKernel: - msg = ("The '%s' kernel 
is not available. Please pick another " - "suitable kernel instead, or install that kernel." % kernel_name) - status_msg = '%s not found' % kernel_name - self.log.warning('Kernel not found: %s' % kernel_name) + msg = ( + "The '%s' kernel is not available. Please pick another " + "suitable kernel instead, or install that kernel." % kernel_name + ) + status_msg = "%s not found" % kernel_name + self.log.warning("Kernel not found: %s" % kernel_name) self.set_status(501) self.finish(json.dumps(dict(message=msg, short_message=status_msg))) return - location = url_path_join(self.base_url, 'api', 'sessions', model['id']) - self.set_header('Location', location) + location = url_path_join(self.base_url, "api", "sessions", model["id"]) + self.set_header("Location", location) self.set_status(201) self.finish(json.dumps(model, default=date_default)) class SessionHandler(APIHandler): - @web.authenticated async def get(self, session_id): # Returns the JSON model for a single session @@ -109,37 +107,41 @@ async def patch(self, session_id): before = await sm.get_session(session_id=session_id) changes = {} - if 'notebook' in model and 'path' in model['notebook']: - self.log.warning('Sessions API changed, see updated swagger docs') - model['path'] = model['notebook']['path'] - model['type'] = 'notebook' - if 'path' in model: - changes['path'] = model['path'] - if 'name' in model: - changes['name'] = model['name'] - if 'type' in model: - changes['type'] = model['type'] - if 'kernel' in model: + if "notebook" in model and "path" in model["notebook"]: + self.log.warning("Sessions API changed, see updated swagger docs") + model["path"] = model["notebook"]["path"] + model["type"] = "notebook" + if "path" in model: + changes["path"] = model["path"] + if "name" in model: + changes["name"] = model["name"] + if "type" in model: + changes["type"] = model["type"] + if "kernel" in model: # Kernel id takes precedence over name. 
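For context, the `POST /api/sessions` handler above accepts a JSON body with `path`, `type`, an optional `name`, and an optional `kernel` spec, and replies with status 201 plus a `Location` header. A hedged client-side sketch (the URL, token, and kernel name are placeholder assumptions):

```python
import requests

base = "http://127.0.0.1:8888"                      # assumption: local server
headers = {"Authorization": "token <your-token>"}   # placeholder token

resp = requests.post(
    f"{base}/api/sessions",
    headers=headers,
    json={
        "path": "Untitled.ipynb",
        "type": "notebook",
        "name": "Untitled.ipynb",
        "kernel": {"name": "python3"},  # assumption: python3 kernelspec exists
    },
)
assert resp.status_code == 201  # handler sets 201 and a Location header
session = resp.json()
print(session["id"], session["kernel"]["id"])
```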
- if model['kernel'].get('id') is not None: - kernel_id = model['kernel']['id'] + if model["kernel"].get("id") is not None: + kernel_id = model["kernel"]["id"] if kernel_id not in km: raise web.HTTPError(400, "No such kernel: %s" % kernel_id) - changes['kernel_id'] = kernel_id - elif model['kernel'].get('name') is not None: - kernel_name = model['kernel']['name'] + changes["kernel_id"] = kernel_id + elif model["kernel"].get("name") is not None: + kernel_name = model["kernel"]["name"] kernel_id = await sm.start_kernel_for_session( - session_id, kernel_name=kernel_name, name=before['name'], - path=before['path'], type=before['type']) - changes['kernel_id'] = kernel_id + session_id, + kernel_name=kernel_name, + name=before["name"], + path=before["path"], + type=before["type"], + ) + changes["kernel_id"] = kernel_id await sm.update_session(session_id, **changes) model = await sm.get_session(session_id=session_id) - if model['kernel']['id'] != before['kernel']['id']: + if model["kernel"]["id"] != before["kernel"]["id"]: # kernel_id changed because we got a new kernel # shutdown the old one - await ensure_async(km.shutdown_kernel(before['kernel']['id'])) + await ensure_async(km.shutdown_kernel(before["kernel"]["id"])) self.finish(json.dumps(model, default=date_default)) @web.authenticated @@ -155,13 +157,13 @@ async def delete(self, session_id): self.finish() -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # URL to handler mappings -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- _session_id_regex = r"(?P\w+-\w+-\w+-\w+-\w+)" default_handlers = [ (r"/api/sessions/%s" % _session_id_regex, SessionHandler), - (r"/api/sessions", SessionRootHandler) + (r"/api/sessions", SessionRootHandler), ] diff --git a/jupyter_server/services/sessions/sessionmanager.py b/jupyter_server/services/sessions/sessionmanager.py index 1ecff95806..bc441a5567 100644 --- a/jupyter_server/services/sessions/sessionmanager.py +++ b/jupyter_server/services/sessions/sessionmanager.py @@ -1,8 +1,6 @@ """A base class session manager.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- import uuid try: @@ -22,33 +20,35 @@ class SessionManager(LoggingConfigurable): - kernel_manager = Instance('jupyter_server.services.kernels.kernelmanager.MappingKernelManager') + kernel_manager = Instance("jupyter_server.services.kernels.kernelmanager.MappingKernelManager") contents_manager = InstanceFromClasses( [ - 'jupyter_server.services.contents.manager.ContentsManager', - 'notebook.services.contents.manager.ContentsManager' + "jupyter_server.services.contents.manager.ContentsManager", + "notebook.services.contents.manager.ContentsManager", ] ) # Session database initialized below _cursor = None _connection = None - _columns = {'session_id', 'path', 'name', 'type', 'kernel_id'} + _columns = {"session_id", "path", "name", "type", "kernel_id"} @property def cursor(self): """Start a cursor and create a database called 'session'""" if self._cursor is None: self._cursor = self.connection.cursor() - self._cursor.execute("""CREATE TABLE session - (session_id, path, name, type, kernel_id)""") + self._cursor.execute( + """CREATE TABLE session + (session_id, path, name, type, kernel_id)""" + ) return self._cursor @property def connection(self): """Start a database connection""" if self._connection is None: - self._connection = sqlite3.connect(':memory:') + self._connection = sqlite3.connect(":memory:") self._connection.row_factory = sqlite3.Row return self._connection @@ -82,21 +82,29 @@ def new_session_id(self): "Create a uuid for a new session" return str(uuid.uuid4()) - async def create_session(self, path=None, name=None, type=None, kernel_name=None, kernel_id=None): + async def create_session( + self, path=None, name=None, type=None, kernel_name=None, kernel_id=None + ): """Creates a session and returns its model""" session_id = self.new_session_id() if kernel_id is not None and kernel_id in self.kernel_manager: pass else: - kernel_id = await self.start_kernel_for_session(session_id, path, name, type, kernel_name) - result = await self.save_session(session_id, path=path, name=name, type=type, kernel_id=kernel_id) + kernel_id = await self.start_kernel_for_session( + session_id, path, name, type, kernel_name + ) + result = await self.save_session( + session_id, path=path, name=name, type=type, kernel_id=kernel_id + ) return result async def start_kernel_for_session(self, session_id, path, name, type, kernel_name): """Start a new kernel for a given session.""" # allow contents manager to specify kernels cwd kernel_path = self.contents_manager.get_kernel_path(path=path) - kernel_id = await self.kernel_manager.start_kernel(path=kernel_path, kernel_name=kernel_name) + kernel_id = await self.kernel_manager.start_kernel( + path=kernel_path, kernel_name=kernel_name + ) return kernel_id async def save_session(self, session_id, path=None, name=None, type=None, kernel_id=None): @@ -124,8 +132,8 @@ async def save_session(self, session_id, path=None, name=None, type=None, kernel model : dict a dictionary of the session model """ - self.cursor.execute("INSERT INTO session VALUES (?,?,?,?,?)", - (session_id, path, name, type, kernel_id) + self.cursor.execute( + "INSERT INTO session VALUES (?,?,?,?,?)", (session_id, path, name, type, kernel_id) ) result = await self.get_session(session_id=session_id) return result @@ -157,7 +165,7 @@ async def get_session(self, **kwargs): raise TypeError("No such column: %r", column) conditions.append("%s=?" 
% column) - query = "SELECT * FROM session WHERE %s" % (' AND '.join(conditions)) + query = "SELECT * FROM session WHERE %s" % (" AND ".join(conditions)) self.cursor.execute(query, list(kwargs.values())) try: @@ -171,7 +179,7 @@ async def get_session(self, **kwargs): for key, value in kwargs.items(): q.append("%s=%r" % (key, value)) - raise web.HTTPError(404, u'Session not found: %s' % (', '.join(q))) + raise web.HTTPError(404, u"Session not found: %s" % (", ".join(q))) model = await self.row_to_model(row) return model @@ -202,7 +210,7 @@ async def update_session(self, session_id, **kwargs): if column not in self._columns: raise TypeError("No such column: %r" % column) sets.append("%s=?" % column) - query = "UPDATE session SET %s WHERE session_id=?" % (', '.join(sets)) + query = "UPDATE session SET %s WHERE session_id=?" % (", ".join(sets)) self.cursor.execute(query, list(kwargs.values()) + [session_id]) def kernel_culled(self, kernel_id): @@ -211,7 +219,7 @@ def kernel_culled(self, kernel_id): async def row_to_model(self, row, tolerate_culled=False): """Takes sqlite database session row and turns it into a dictionary""" - kernel_culled = await ensure_async(self.kernel_culled(row['kernel_id'])) + kernel_culled = await ensure_async(self.kernel_culled(row["kernel_id"])) if kernel_culled: # The kernel was culled or died without deleting the session. # We can't use delete_session here because that tries to find @@ -220,27 +228,29 @@ async def row_to_model(self, row, tolerate_culled=False): # If caller wishes to tolerate culled kernels, log a warning # and return None. Otherwise, raise KeyError with a similar # message. - self.cursor.execute("DELETE FROM session WHERE session_id=?", - (row['session_id'],)) - msg = "Kernel '{kernel_id}' appears to have been culled or died unexpectedly, " \ - "invalidating session '{session_id}'. The session has been removed.".\ - format(kernel_id=row['kernel_id'],session_id=row['session_id']) + self.cursor.execute("DELETE FROM session WHERE session_id=?", (row["session_id"],)) + msg = ( + "Kernel '{kernel_id}' appears to have been culled or died unexpectedly, " + "invalidating session '{session_id}'. The session has been removed.".format( + kernel_id=row["kernel_id"], session_id=row["session_id"] + ) + ) if tolerate_culled: self.log.warning(msg + " Continuing...") return raise KeyError(msg) - kernel_model = await ensure_async(self.kernel_manager.kernel_model(row['kernel_id'])) + kernel_model = await ensure_async(self.kernel_manager.kernel_model(row["kernel_id"])) model = { - 'id': row['session_id'], - 'path': row['path'], - 'name': row['name'], - 'type': row['type'], - 'kernel': kernel_model + "id": row["session_id"], + "path": row["path"], + "name": row["name"], + "type": row["type"], + "kernel": kernel_model, } - if row['type'] == 'notebook': + if row["type"] == "notebook": # Provide the deprecated API. 
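The session store manipulated above is a plain in-memory SQLite table; setting `sqlite3.Row` as the `row_factory` is what lets `row_to_model` read columns by name. A self-contained sketch of the same pattern (the inserted values are made up):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row  # rows become addressable by column name
cur = conn.cursor()
cur.execute("CREATE TABLE session (session_id, path, name, type, kernel_id)")
cur.execute(
    "INSERT INTO session VALUES (?,?,?,?,?)",
    ("sid-1", "Untitled.ipynb", "Untitled.ipynb", "notebook", "kid-1"),
)
row = cur.execute(
    "SELECT * FROM session WHERE session_id=?", ("sid-1",)
).fetchone()
print(row["path"], row["kernel_id"])  # name-based access, as in row_to_model
```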
- model['notebook'] = {'path': row['path'], 'name': row['name']} + model["notebook"] = {"path": row["path"], "name": row["name"]} return model async def list_sessions(self): @@ -261,5 +271,5 @@ async def list_sessions(self): async def delete_session(self, session_id): """Deletes the row in the session database with given session_id""" session = await self.get_session(session_id=session_id) - await ensure_async(self.kernel_manager.shutdown_kernel(session['kernel']['id'])) + await ensure_async(self.kernel_manager.shutdown_kernel(session["kernel"]["id"])) self.cursor.execute("DELETE FROM session WHERE session_id=?", (session_id,)) diff --git a/jupyter_server/services/shutdown.py b/jupyter_server/services/shutdown.py index c7f5361c42..31a60b9924 100644 --- a/jupyter_server/services/shutdown.py +++ b/jupyter_server/services/shutdown.py @@ -1,6 +1,8 @@ """HTTP handler to shut down the Jupyter server. """ -from tornado import web, ioloop +from tornado import ioloop +from tornado import web + from jupyter_server.base.handlers import JupyterHandler diff --git a/jupyter_server/static/style/index.css b/jupyter_server/static/style/index.css index a9ce230b64..3e50941cf5 100644 --- a/jupyter_server/static/style/index.css +++ b/jupyter_server/static/style/index.css @@ -1,92 +1,91 @@ - #jupyter_server { - padding-left: 0px; - padding-top: 1px; - padding-bottom: 1px; + padding-left: 0px; + padding-top: 1px; + padding-bottom: 1px; } #jupyter_server img { - height: 28px; + height: 28px; } #jupyter-main-app { - padding-top: 50px; - text-align: center; + padding-top: 50px; + text-align: center; } body { - font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; - font-size: 13px; - line-height: 1.42857143; - color: #000; + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + font-size: 13px; + line-height: 1.42857143; + color: #000; } body > #header { - display: block; - background-color: #fff; - position: relative; - z-index: 100; + display: block; + background-color: #fff; + position: relative; + z-index: 100; } body > #header #header-container { - display: flex; - flex-direction: row; - justify-content: space-between; - padding: 5px; - padding-top: 5px; - padding-bottom: 5px; - padding-bottom: 5px; - padding-top: 5px; - box-sizing: border-box; - -moz-box-sizing: border-box; - -webkit-box-sizing: border-box; + display: flex; + flex-direction: row; + justify-content: space-between; + padding: 5px; + padding-top: 5px; + padding-bottom: 5px; + padding-bottom: 5px; + padding-top: 5px; + box-sizing: border-box; + -moz-box-sizing: border-box; + -webkit-box-sizing: border-box; } body > #header .header-bar { - width: 100%; - height: 1px; - background: #e7e7e7; - margin-bottom: -1px; + width: 100%; + height: 1px; + background: #e7e7e7; + margin-bottom: -1px; } .navbar-brand { - float: left; - height: 30px; - padding: 6px 0px; - padding-top: 6px; - padding-bottom: 6px; - padding-left: 0px; - font-size: 17px; - line-height: 18px; + float: left; + height: 30px; + padding: 6px 0px; + padding-top: 6px; + padding-bottom: 6px; + padding-left: 0px; + font-size: 17px; + line-height: 18px; } -.navbar-brand, .navbar-nav > li > a { - text-shadow: 0 1px 0 rgba(255,255,255,.25); +.navbar-brand, +.navbar-nav > li > a { + text-shadow: 0 1px 0 rgba(255, 255, 255, 0.25); } .nav { - padding-left: 0; - margin-bottom: 0; - list-style: none; + padding-left: 0; + margin-bottom: 0; + list-style: none; } .center-nav { - display: inline-block; - margin-bottom: -4px; + display: inline-block; + margin-bottom: -4px; } - div.error { 
- margin: 2em; - text-align: center; + margin: 2em; + text-align: center; } div.error > h1 { - font-size: 500%; - line-height: normal; + font-size: 500%; + line-height: normal; } div.error > p { - font-size: 200%; - line-height: normal; + font-size: 200%; + line-height: normal; } diff --git a/jupyter_server/templates/404.html b/jupyter_server/templates/404.html index e29646af1a..e4030816ba 100644 --- a/jupyter_server/templates/404.html +++ b/jupyter_server/templates/404.html @@ -2,4 +2,3 @@ {% block error_detail %}

<p>{% trans %}You are requesting a page that does not exist!{% endtrans %}</p>

{% endblock %} - diff --git a/jupyter_server/templates/logout.html b/jupyter_server/templates/logout.html index 1208868648..81121e1bf8 100644 --- a/jupyter_server/templates/logout.html +++ b/jupyter_server/templates/logout.html @@ -32,4 +32,3 @@
{% endblock %} - diff --git a/jupyter_server/terminal/__init__.py b/jupyter_server/terminal/__init__.py index 6e9171cb97..8bac278403 100644 --- a/jupyter_server/terminal/__init__.py +++ b/jupyter_server/terminal/__init__.py @@ -2,9 +2,10 @@ import sys import terminado + from ..utils import check_version -if not check_version(terminado.__version__, '0.8.3'): +if not check_version(terminado.__version__, "0.8.3"): raise ImportError("terminado >= 0.8.3 required, found %s" % terminado.__version__) from ipython_genutils.py3compat import which @@ -15,35 +16,35 @@ def initialize(webapp, root_dir, connection_url, settings): - if os.name == 'nt': - default_shell = 'powershell.exe' + if os.name == "nt": + default_shell = "powershell.exe" else: - default_shell = which('sh') - shell_override = settings.get('shell_command') - shell = ( - [os.environ.get('SHELL') or default_shell] - if shell_override is None - else shell_override - ) + default_shell = which("sh") + shell_override = settings.get("shell_command") + shell = [os.environ.get("SHELL") or default_shell] if shell_override is None else shell_override # When the notebook server is not running in a terminal (e.g. when # it's launched by a JupyterHub spawner), it's likely that the user # environment hasn't been fully set up. In that case, run a login # shell to automatically source /etc/profile and the like, unless # the user has specifically set a preferred shell command. - if os.name != 'nt' and shell_override is None and not sys.stdout.isatty(): - shell.append('-l') - terminal_manager = webapp.settings['terminal_manager'] = TerminalManager( + if os.name != "nt" and shell_override is None and not sys.stdout.isatty(): + shell.append("-l") + terminal_manager = webapp.settings["terminal_manager"] = TerminalManager( shell_command=shell, - extra_env={'JUPYTER_SERVER_ROOT': root_dir, - 'JUPYTER_SERVER_URL': connection_url, - }, - parent=webapp.settings['serverapp'], + extra_env={ + "JUPYTER_SERVER_ROOT": root_dir, + "JUPYTER_SERVER_URL": connection_url, + }, + parent=webapp.settings["serverapp"], ) - terminal_manager.log = webapp.settings['serverapp'].log - base_url = webapp.settings['base_url'] + terminal_manager.log = webapp.settings["serverapp"].log + base_url = webapp.settings["base_url"] handlers = [ - (ujoin(base_url, r"/terminals/websocket/(\w+)"), TermSocket, - {'term_manager': terminal_manager}), + ( + ujoin(base_url, r"/terminals/websocket/(\w+)"), + TermSocket, + {"term_manager": terminal_manager}, + ), (ujoin(base_url, r"/api/terminals"), api_handlers.TerminalRootHandler), (ujoin(base_url, r"/api/terminals/(\w+)"), api_handlers.TerminalHandler), ] diff --git a/jupyter_server/terminal/api_handlers.py b/jupyter_server/terminal/api_handlers.py index 92bb624289..76bfeee7c0 100644 --- a/jupyter_server/terminal/api_handlers.py +++ b/jupyter_server/terminal/api_handlers.py @@ -1,10 +1,11 @@ import json + from tornado import web + from ..base.handlers import APIHandler class TerminalRootHandler(APIHandler): - @web.authenticated def get(self): models = self.terminal_manager.list() @@ -20,7 +21,7 @@ def post(self): class TerminalHandler(APIHandler): - SUPPORTED_METHODS = ('GET', 'DELETE') + SUPPORTED_METHODS = ("GET", "DELETE") @web.authenticated def get(self, name): diff --git a/jupyter_server/terminal/handlers.py b/jupyter_server/terminal/handlers.py index 415df7be93..e56c780dcb 100644 --- a/jupyter_server/terminal/handlers.py +++ b/jupyter_server/terminal/handlers.py @@ -1,18 +1,16 @@ -#encoding: utf-8 +# encoding: utf-8 """Tornado handlers for the 
terminal emulator.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - -from tornado import web import terminado -from jupyter_server._tz import utcnow +from tornado import web + from ..base.handlers import JupyterHandler from ..base.zmqhandlers import WebSocketMixin +from jupyter_server._tz import utcnow class TermSocket(WebSocketMixin, JupyterHandler, terminado.TermSocket): - def origin_check(self): """Terminado adds redundant origin_check Tornado already calls check_origin, so don't do anything here. @@ -35,7 +33,7 @@ def write_message(self, message, binary=False): self._update_activity() def _update_activity(self): - self.application.settings['terminal_last_activity'] = utcnow() + self.application.settings["terminal_last_activity"] = utcnow() # terminal may not be around on deletion/cull if self.term_name in self.terminal_manager.terminals: self.terminal_manager.terminals[self.term_name].last_activity = utcnow() diff --git a/jupyter_server/terminal/terminalmanager.py b/jupyter_server/terminal/terminalmanager.py index 38144855b2..cfbfea8e4c 100644 --- a/jupyter_server/terminal/terminalmanager.py +++ b/jupyter_server/terminal/terminalmanager.py @@ -2,19 +2,20 @@ - raises HTTPErrors - creates REST API models """ - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from datetime import timedelta import terminado - -from datetime import timedelta -from jupyter_server._tz import utcnow, isoformat from tornado import web -from tornado.ioloop import IOLoop, PeriodicCallback +from tornado.ioloop import IOLoop +from tornado.ioloop import PeriodicCallback from traitlets import Integer from traitlets.config import LoggingConfigurable + from ..prometheus.metrics import TERMINAL_CURRENTLY_RUNNING_TOTAL +from jupyter_server._tz import isoformat +from jupyter_server._tz import utcnow class TerminalManager(LoggingConfigurable, terminado.NamedTermManager): @@ -24,15 +25,19 @@ class TerminalManager(LoggingConfigurable, terminado.NamedTermManager): _initialized_culler = False - cull_inactive_timeout = Integer(0, config=True, + cull_inactive_timeout = Integer( + 0, + config=True, help="""Timeout (in seconds) in which a terminal has been inactive and ready to be culled. 
- Values of 0 or lower disable culling.""" - ) + Values of 0 or lower disable culling.""", + ) cull_interval_default = 300 # 5 minutes - cull_interval = Integer(cull_interval_default, config=True, - help="""The interval (in seconds) on which to check for terminals exceeding the inactive timeout value.""" - ) + cull_interval = Integer( + cull_interval_default, + config=True, + help="""The interval (in seconds) on which to check for terminals exceeding the inactive timeout value.""", + ) # ------------------------------------------------------------------------- # Methods for managing terminals @@ -64,9 +69,7 @@ def list(self): models = [self.get_terminal_model(name) for name in self.terminals] # Update the metric below to the length of the list 'terms' - TERMINAL_CURRENTLY_RUNNING_TOTAL.set( - len(models) - ) + TERMINAL_CURRENTLY_RUNNING_TOTAL.set(len(models)) return models async def terminate(self, name, force=False): @@ -99,7 +102,7 @@ def get_terminal_model(self, name): def _check_terminal(self, name): """Check a that terminal 'name' exists and raise 404 if not.""" if name not in self.terminals: - raise web.HTTPError(404, u'Terminal not found: %s' % name) + raise web.HTTPError(404, u"Terminal not found: %s" % name) def _initialize_culler(self): """Start culler if 'cull_inactive_timeout' is greater than zero. @@ -109,27 +112,39 @@ def _initialize_culler(self): if self._culler_callback is None: loop = IOLoop.current() if self.cull_interval <= 0: # handle case where user set invalid value - self.log.warning("Invalid value for 'cull_interval' detected (%s) - using default value (%s).", - self.cull_interval, self.cull_interval_default) + self.log.warning( + "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", + self.cull_interval, + self.cull_interval_default, + ) self.cull_interval = self.cull_interval_default self._culler_callback = PeriodicCallback( - self._cull_terminals, 1000 * self.cull_interval) - self.log.info("Culling terminals with inactivity > %s seconds at %s second intervals ...", - self.cull_inactive_timeout, self.cull_interval) + self._cull_terminals, 1000 * self.cull_interval + ) + self.log.info( + "Culling terminals with inactivity > %s seconds at %s second intervals ...", + self.cull_inactive_timeout, + self.cull_interval, + ) self._culler_callback.start() self._initialized_culler = True async def _cull_terminals(self): - self.log.debug("Polling every %s seconds for terminals inactive for > %s seconds...", - self.cull_interval, self.cull_inactive_timeout) + self.log.debug( + "Polling every %s seconds for terminals inactive for > %s seconds...", + self.cull_interval, + self.cull_inactive_timeout, + ) # Create a separate list of terminals to avoid conflicting updates while iterating for name in list(self.terminals): try: await self._cull_inactive_terminal(name) except Exception as e: - self.log.exception("The following exception was encountered while checking the " - "activity of terminal {}: {}".format(name, e)) + self.log.exception( + "The following exception was encountered while checking the " + "activity of terminal {}: {}".format(name, e) + ) async def _cull_inactive_terminal(self, name): try: @@ -138,13 +153,15 @@ async def _cull_inactive_terminal(self, name): return # KeyErrors are somewhat expected since the terminal can be terminated as the culling check is made. 
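The two traits above (`cull_inactive_timeout`, `cull_interval`) are both `config=True`, so terminal culling can be switched on from a config file. An illustrative `jupyter_server_config.py` snippet (the values are examples, not defaults):

```python
c = get_config()  # noqa -- provided by the Jupyter config loader at load time

c.TerminalManager.cull_inactive_timeout = 600  # cull terminals idle > 10 min
c.TerminalManager.cull_interval = 60           # run the culling check每 minute
```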
self.log.debug("name=%s, last_activity=%s", name, term.last_activity) - if hasattr(term, 'last_activity'): + if hasattr(term, "last_activity"): dt_now = utcnow() dt_inactive = dt_now - term.last_activity # Compute idle properties is_time = dt_inactive > timedelta(seconds=self.cull_inactive_timeout) # Cull the kernel if all three criteria are met - if (is_time): + if is_time: inactivity = int(dt_inactive.total_seconds()) - self.log.warning("Culling terminal '%s' due to %s seconds of inactivity.", name, inactivity) + self.log.warning( + "Culling terminal '%s' due to %s seconds of inactivity.", name, inactivity + ) await self.terminate(name, force=True) diff --git a/jupyter_server/tests/auth/test_login.py b/jupyter_server/tests/auth/test_login.py index ebf2a5a1d2..6f1c358cc4 100644 --- a/jupyter_server/tests/auth/test_login.py +++ b/jupyter_server/tests/auth/test_login.py @@ -1,11 +1,11 @@ """Tests for login redirects""" - from functools import partial from urllib.parse import urlencode import pytest from tornado.httpclient import HTTPClientError -from tornado.httputil import url_concat, parse_cookie +from tornado.httputil import parse_cookie +from tornado.httputil import url_concat from jupyter_server.utils import url_path_join diff --git a/jupyter_server/tests/auth/test_security.py b/jupyter_server/tests/auth/test_security.py index ef7943401e..ed6d62a5cc 100644 --- a/jupyter_server/tests/auth/test_security.py +++ b/jupyter_server/tests/auth/test_security.py @@ -1,30 +1,31 @@ - -from jupyter_server.auth.security import passwd, passwd_check +from jupyter_server.auth.security import passwd +from jupyter_server.auth.security import passwd_check def test_passwd_structure(): - p = passwd('passphrase') - algorithm, hashed = p.split(':') - assert algorithm == 'argon2', algorithm - assert hashed.startswith('$argon2id$'), hashed + p = passwd("passphrase") + algorithm, hashed = p.split(":") + assert algorithm == "argon2", algorithm + assert hashed.startswith("$argon2id$"), hashed def test_roundtrip(): - p = passwd('passphrase') - assert passwd_check(p, 'passphrase') + p = passwd("passphrase") + assert passwd_check(p, "passphrase") def test_bad(): - p = passwd('passphrase') + p = passwd("passphrase") assert not passwd_check(p, p) - assert not passwd_check(p, 'a:b:c:d') - assert not passwd_check(p, 'a:b') + assert not passwd_check(p, "a:b:c:d") + assert not passwd_check(p, "a:b") def test_passwd_check_unicode(): # GH issue #4524 - phash = u'sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f' + phash = u"sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f" assert passwd_check(phash, u"łeÂļŧ←↓→") - phash = (u'argon2:$argon2id$v=19$m=10240,t=10,p=8$' - u'qjjDiZUofUVVnrVYxacnbA$l5pQq1bJ8zglGT2uXP6iOg') + phash = ( + u"argon2:$argon2id$v=19$m=10240,t=10,p=8$" u"qjjDiZUofUVVnrVYxacnbA$l5pQq1bJ8zglGT2uXP6iOg" + ) assert passwd_check(phash, u"łeÂļŧ←↓→") diff --git a/jupyter_server/tests/extension/conftest.py b/jupyter_server/tests/extension/conftest.py index bc232473fb..af7b714c37 100644 --- a/jupyter_server/tests/extension/conftest.py +++ b/jupyter_server/tests/extension/conftest.py @@ -1,4 +1,5 @@ import pytest + from .mockextensions.app import MockExtensionApp @@ -25,10 +26,9 @@ """ - @pytest.fixture def mock_template(jp_template_dir): - index = jp_template_dir.joinpath('index.html') + index = jp_template_dir.joinpath("index.html") index.write_text(mock_html) diff --git a/jupyter_server/tests/extension/mockextensions/__init__.py b/jupyter_server/tests/extension/mockextensions/__init__.py index 
8b56df64db..d821824771 100644 --- a/jupyter_server/tests/extension/mockextensions/__init__.py +++ b/jupyter_server/tests/extension/mockextensions/__init__.py @@ -8,17 +8,8 @@ # by the test functions. def _jupyter_server_extension_points(): return [ - { - 'module': 'jupyter_server.tests.extension.mockextensions.app', - 'app': MockExtensionApp - }, - { - 'module': 'jupyter_server.tests.extension.mockextensions.mock1' - }, - { - 'module': 'jupyter_server.tests.extension.mockextensions.mock2' - }, - { - 'module': 'jupyter_server.tests.extension.mockextensions.mock3' - } + {"module": "jupyter_server.tests.extension.mockextensions.app", "app": MockExtensionApp}, + {"module": "jupyter_server.tests.extension.mockextensions.mock1"}, + {"module": "jupyter_server.tests.extension.mockextensions.mock2"}, + {"module": "jupyter_server.tests.extension.mockextensions.mock3"}, ] diff --git a/jupyter_server/tests/extension/mockextensions/app.py b/jupyter_server/tests/extension/mockextensions/app.py index 31a2d14b12..7045417b23 100644 --- a/jupyter_server/tests/extension/mockextensions/app.py +++ b/jupyter_server/tests/extension/mockextensions/app.py @@ -1,56 +1,44 @@ import os -from traitlets import Unicode, List + +from traitlets import List +from traitlets import Unicode from jupyter_server.base.handlers import JupyterHandler -from jupyter_server.extension.application import ( - ExtensionApp, - ExtensionAppJinjaMixin -) -from jupyter_server.extension.handler import ( - ExtensionHandlerMixin, - ExtensionHandlerJinjaMixin -) +from jupyter_server.extension.application import ExtensionApp +from jupyter_server.extension.application import ExtensionAppJinjaMixin +from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin +from jupyter_server.extension.handler import ExtensionHandlerMixin STATIC_PATH = os.path.join(os.path.dirname(__file__), "static") # Function that makes these extensions discoverable # by the test functions. 
def _jupyter_server_extension_points(): - return [ - { - 'module': __name__, - 'app': MockExtensionApp - } - ] + return [{"module": __name__, "app": MockExtensionApp}] -class MockExtensionHandler(ExtensionHandlerMixin, JupyterHandler): +class MockExtensionHandler(ExtensionHandlerMixin, JupyterHandler): def get(self): self.finish(self.config.mock_trait) class MockExtensionTemplateHandler( - ExtensionHandlerJinjaMixin, - ExtensionHandlerMixin, - JupyterHandler + ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler ): - def get(self): self.write(self.render_template("index.html")) class MockExtensionApp(ExtensionAppJinjaMixin, ExtensionApp): - name = 'mockextension' + name = "mockextension" template_paths = List().tag(config=True) static_paths = [STATIC_PATH] - mock_trait = Unicode('mock trait', config=True) + mock_trait = Unicode("mock trait", config=True) loaded = False serverapp_config = { - "jpserver_extensions": { - "jupyter_server.tests.extension.mockextensions.mock1": True - } + "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions.mock1": True} } @staticmethod @@ -58,8 +46,8 @@ def get_extension_package(): return "jupyter_server.tests.extension.mockextensions" def initialize_handlers(self): - self.handlers.append(('/mock', MockExtensionHandler)) - self.handlers.append(('/mock_template', MockExtensionTemplateHandler)) + self.handlers.append(("/mock", MockExtensionHandler)) + self.handlers.append(("/mock_template", MockExtensionTemplateHandler)) self.loaded = True diff --git a/jupyter_server/tests/extension/mockextensions/mock1.py b/jupyter_server/tests/extension/mockextensions/mock1.py index 5de188c425..49f6d77a26 100644 --- a/jupyter_server/tests/extension/mockextensions/mock1.py +++ b/jupyter_server/tests/extension/mockextensions/mock1.py @@ -1,16 +1,10 @@ """A mock extension named `mock1` for testing purposes. """ - - # by the test functions. def _jupyter_server_extension_paths(): - return [ - { - 'module': 'jupyter_server.tests.extension.mockextensions.mock1' - } - ] + return [{"module": "jupyter_server.tests.extension.mockextensions.mock1"}] def _load_jupyter_server_extension(serverapp): serverapp.mockI = True - serverapp.mock_shared = 'I' + serverapp.mock_shared = "I" diff --git a/jupyter_server/tests/extension/mockextensions/mock2.py b/jupyter_server/tests/extension/mockextensions/mock2.py index d7e8d5de1b..87b8f4591c 100644 --- a/jupyter_server/tests/extension/mockextensions/mock2.py +++ b/jupyter_server/tests/extension/mockextensions/mock2.py @@ -1,16 +1,10 @@ """A mock extension named `mock2` for testing purposes. """ - - # by the test functions. 
def _jupyter_server_extension_paths(): - return [ - { - 'module': 'jupyter_server.tests.extension.mockextensions.mock2' - } - ] + return [{"module": "jupyter_server.tests.extension.mockextensions.mock2"}] def _load_jupyter_server_extension(serverapp): serverapp.mockII = True - serverapp.mock_shared = 'II' + serverapp.mock_shared = "II" diff --git a/jupyter_server/tests/extension/mockextensions/mock3.py b/jupyter_server/tests/extension/mockextensions/mock3.py index 233f492a60..cdbffefdfa 100644 --- a/jupyter_server/tests/extension/mockextensions/mock3.py +++ b/jupyter_server/tests/extension/mockextensions/mock3.py @@ -3,4 +3,4 @@ def _load_jupyter_server_extension(serverapp): - pass \ No newline at end of file + pass diff --git a/jupyter_server/tests/extension/mockextensions/mockext_both.py b/jupyter_server/tests/extension/mockextensions/mockext_both.py index 5346ae3c95..38076e5c2d 100644 --- a/jupyter_server/tests/extension/mockextensions/mockext_both.py +++ b/jupyter_server/tests/extension/mockextensions/mockext_both.py @@ -1,16 +1,10 @@ """A mock extension named `mockext_both` for testing purposes. """ - - # Function that makes these extensions discoverable # by the test functions. def _jupyter_server_extension_paths(): - return [ - { - 'module': 'jupyter_server.tests.extension.mockextensions.mockext_both' - } - ] + return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_both"}] def _load_jupyter_server_extension(serverapp): - pass \ No newline at end of file + pass diff --git a/jupyter_server/tests/extension/mockextensions/mockext_py.py b/jupyter_server/tests/extension/mockextensions/mockext_py.py index 7376438678..d0cf69b38c 100644 --- a/jupyter_server/tests/extension/mockextensions/mockext_py.py +++ b/jupyter_server/tests/extension/mockextensions/mockext_py.py @@ -1,16 +1,10 @@ """A mock extension named `mockext_py` for testing purposes. """ - - # Function that makes these extensions discoverable # by the test functions. def _jupyter_server_extension_paths(): - return [ - { - 'module': 'jupyter_server.tests.extension.mockextensions.mockext_py' - } - ] + return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_py"}] def _load_jupyter_server_extension(serverapp): - pass \ No newline at end of file + pass diff --git a/jupyter_server/tests/extension/mockextensions/mockext_sys.py b/jupyter_server/tests/extension/mockextensions/mockext_sys.py index 915b0f6254..70506e2fe3 100644 --- a/jupyter_server/tests/extension/mockextensions/mockext_sys.py +++ b/jupyter_server/tests/extension/mockextensions/mockext_sys.py @@ -1,16 +1,10 @@ """A mock extension named `mockext_py` for testing purposes. """ - - # Function that makes these extensions discoverable # by the test functions. def _jupyter_server_extension_paths(): - return [ - { - 'module': 'jupyter_server.tests.extension.mockextensions.mockext_sys' - } - ] + return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_sys"}] def _load_jupyter_server_extension(serverapp): - pass \ No newline at end of file + pass diff --git a/jupyter_server/tests/extension/mockextensions/mockext_user.py b/jupyter_server/tests/extension/mockextensions/mockext_user.py index b234ea6c50..c1e8eafebc 100644 --- a/jupyter_server/tests/extension/mockextensions/mockext_user.py +++ b/jupyter_server/tests/extension/mockextensions/mockext_user.py @@ -1,16 +1,10 @@ """A mock extension named `mockext_user` for testing purposes. """ - - # Function that makes these extensions discoverable # by the test functions. 
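All of these mock modules follow the same discovery contract: a module-level hook that returns the extension points, plus a load function that receives the `ServerApp` instance. A minimal sketch of a real extension module written the same way (the module name is hypothetical):

```python
"""my_package/my_extension.py -- hypothetical minimal server extension."""

def _jupyter_server_extension_points():
    # Discovery hook: one dict per extension provided by this module.
    return [{"module": "my_package.my_extension"}]

def _load_jupyter_server_extension(serverapp):
    # Called once at server startup with the ServerApp instance.
    serverapp.log.info("my_extension loaded")
```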
def _jupyter_server_extension_paths(): - return [ - { - 'module': 'jupyter_server.tests.extension.mockextensions.mockext_user' - } - ] + return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_user"}] def _load_jupyter_server_extension(serverapp): - pass \ No newline at end of file + pass diff --git a/jupyter_server/tests/extension/mockextensions/static/mock.txt b/jupyter_server/tests/extension/mockextensions/static/mock.txt index 95d2861de0..36dd88b8c4 100644 --- a/jupyter_server/tests/extension/mockextensions/static/mock.txt +++ b/jupyter_server/tests/extension/mockextensions/static/mock.txt @@ -1 +1 @@ -mock static content \ No newline at end of file +mock static content diff --git a/jupyter_server/tests/extension/test_app.py b/jupyter_server/tests/extension/test_app.py index fe83d24ba2..fc3f1fe25e 100644 --- a/jupyter_server/tests/extension/test_app.py +++ b/jupyter_server/tests/extension/test_app.py @@ -1,24 +1,18 @@ import pytest from traitlets.config import Config + +from .mockextensions.app import MockExtensionApp from jupyter_server.serverapp import ServerApp from jupyter_server.utils import run_sync -from .mockextensions.app import MockExtensionApp @pytest.fixture def jp_server_config(jp_template_dir): config = { "ServerApp": { - "jpserver_extensions": { - "jupyter_server.tests.extension.mockextensions": True - }, + "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True}, }, - "MockExtensionApp": { - "template_paths": [ - str(jp_template_dir) - ], - "log_level": 'DEBUG' - } + "MockExtensionApp": {"template_paths": [str(jp_template_dir)], "log_level": "DEBUG"}, } return config @@ -41,14 +35,8 @@ def test_initialize(jp_serverapp, jp_template_dir, mock_extension): @pytest.mark.parametrize( - 'trait_name, trait_value, jp_argv', - ( - [ - 'mock_trait', - 'test mock trait', - ['--MockExtensionApp.mock_trait=test mock trait'] - ], - ) + "trait_name, trait_value, jp_argv", + (["mock_trait", "test mock trait", ["--MockExtensionApp.mock_trait=test mock trait"]],), ) def test_instance_creation_with_argv( trait_name, @@ -67,41 +55,39 @@ def test_extensionapp_load_config_file( # Assert default config_file_paths is the same in the app and extension. 
assert mock_extension.config_file_paths == jp_serverapp.config_file_paths assert mock_extension.config_dir == jp_serverapp.config_dir - assert mock_extension.config_file_name == 'jupyter_mockextension_config' + assert mock_extension.config_file_name == "jupyter_mockextension_config" # Assert that the trait is updated by config file - assert mock_extension.mock_trait == 'config from file' + assert mock_extension.mock_trait == "config from file" OPEN_BROWSER_COMBINATIONS = ( (True, {}), - (True, {'ServerApp': {'open_browser': True}}), - (False, {'ServerApp': {'open_browser': False}}), - (True, {'MockExtensionApp': {'open_browser': True}}), - (False, {'MockExtensionApp': {'open_browser': False}}), - (True, {'ServerApp': {'open_browser': True}, 'MockExtensionApp': {'open_browser': True}}), - (False, {'ServerApp': {'open_browser': True}, 'MockExtensionApp': {'open_browser': False}}), - (True, {'ServerApp': {'open_browser': False}, 'MockExtensionApp': {'open_browser': True}}), - (False, {'ServerApp': {'open_browser': False}, 'MockExtensionApp': {'open_browser': False}}), + (True, {"ServerApp": {"open_browser": True}}), + (False, {"ServerApp": {"open_browser": False}}), + (True, {"MockExtensionApp": {"open_browser": True}}), + (False, {"MockExtensionApp": {"open_browser": False}}), + (True, {"ServerApp": {"open_browser": True}, "MockExtensionApp": {"open_browser": True}}), + (False, {"ServerApp": {"open_browser": True}, "MockExtensionApp": {"open_browser": False}}), + (True, {"ServerApp": {"open_browser": False}, "MockExtensionApp": {"open_browser": True}}), + (False, {"ServerApp": {"open_browser": False}, "MockExtensionApp": {"open_browser": False}}), ) -@pytest.mark.parametrize( - 'expected_value, config', OPEN_BROWSER_COMBINATIONS -) + +@pytest.mark.parametrize("expected_value, config", OPEN_BROWSER_COMBINATIONS) def test_browser_open(monkeypatch, jp_environ, config, expected_value): serverapp = MockExtensionApp.initialize_server(config=Config(config)) assert serverapp.open_browser == expected_value - def test_load_parallel_extensions(monkeypatch, jp_environ): serverapp = MockExtensionApp.initialize_server() exts = serverapp.extension_manager.extensions - assert 'jupyter_server.tests.extension.mockextensions.mock1' in exts - assert 'jupyter_server.tests.extension.mockextensions' in exts + assert "jupyter_server.tests.extension.mockextensions.mock1" in exts + assert "jupyter_server.tests.extension.mockextensions" in exts exts = serverapp.jpserver_extensions - assert exts['jupyter_server.tests.extension.mockextensions.mock1'] - assert exts['jupyter_server.tests.extension.mockextensions'] + assert exts["jupyter_server.tests.extension.mockextensions.mock1"] + assert exts["jupyter_server.tests.extension.mockextensions"] def test_stop_extension(jp_serverapp, caplog): @@ -113,15 +99,14 @@ def test_stop_extension(jp_serverapp, caplog): # load extensions (make sure we only have the one extension loaded jp_serverapp.extension_manager.load_all_extensions(jp_serverapp) - extension_name = 'jupyter_server.tests.extension.mockextensions' - assert list(jp_serverapp.extension_manager.extension_apps) == [ - extension_name - ] + extension_name = "jupyter_server.tests.extension.mockextensions" + assert list(jp_serverapp.extension_manager.extension_apps) == [extension_name] # add a stop_extension method for the extension app async def _stop(*args): nonlocal calls calls += 1 + for apps in jp_serverapp.extension_manager.extension_apps.values(): for app in apps: if app: @@ -130,11 +115,8 @@ async def _stop(*args): # 
call cleanup_extensions, check the logging is correct caplog.clear() run_sync(jp_serverapp.cleanup_extensions()) - assert [ - msg - for *_, msg in caplog.record_tuples - ] == [ - 'Shutting down 1 extension', + assert [msg for *_, msg in caplog.record_tuples] == [ + "Shutting down 1 extension", '{} | extension app "mockextension" stopping'.format(extension_name), '{} | extension app "mockextension" stopped'.format(extension_name), ] diff --git a/jupyter_server/tests/extension/test_config.py b/jupyter_server/tests/extension/test_config.py index 092c72e378..8669697d8a 100644 --- a/jupyter_server/tests/extension/test_config.py +++ b/jupyter_server/tests/extension/test_config.py @@ -1,6 +1,6 @@ import pytest - from jupyter_core.paths import jupyter_config_path + from jupyter_server.extension.config import ( ExtensionConfigManager, ) @@ -14,7 +14,7 @@ @pytest.fixture def configd(jp_env_config_path): """A pathlib.Path object that acts like a jupyter_server_config.d folder.""" - configd = jp_env_config_path.joinpath('jupyter_server_config.d') + configd = jp_env_config_path.joinpath("jupyter_server_config.d") configd.mkdir() return configd @@ -29,6 +29,7 @@ def configd(jp_env_config_path): } """ + @pytest.fixture def ext1_config(configd): config = configd.joinpath("ext1_config.json") @@ -53,9 +54,7 @@ def ext2_config(configd): def test_list_extension_from_configd(ext1_config, ext2_config): - manager = ExtensionConfigManager( - read_config_path=jupyter_config_path() - ) + manager = ExtensionConfigManager(read_config_path=jupyter_config_path()) extensions = manager.get_jpserver_extensions() assert "ext2_config" in extensions - assert "ext1_config" in extensions \ No newline at end of file + assert "ext1_config" in extensions diff --git a/jupyter_server/tests/extension/test_entrypoint.py b/jupyter_server/tests/extension/test_entrypoint.py index 97af9ad17b..5f7d10de80 100644 --- a/jupyter_server/tests/extension/test_entrypoint.py +++ b/jupyter_server/tests/extension/test_entrypoint.py @@ -2,14 +2,14 @@ # All test coroutines will be treated as marked. 
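The `configd` fixture above mirrors how extensions are enabled in practice: a JSON drop-in under a `jupyter_server_config.d` directory on the config path. A sketch that produces an equivalent file (the directory location and extension name are assumptions):

```python
import json
import pathlib

configd = pathlib.Path("jupyter_server_config.d")
configd.mkdir(exist_ok=True)
(configd / "myext_config.json").write_text(
    json.dumps({"ServerApp": {"jpserver_extensions": {"myext": True}}})
)
```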
-pytestmark = pytest.mark.script_launch_mode('subprocess') +pytestmark = pytest.mark.script_launch_mode("subprocess") def test_server_extension_list(jp_environ, script_runner): ret = script_runner.run( - 'jupyter', - 'server', - 'extension', - 'list', + "jupyter", + "server", + "extension", + "list", ) assert ret.success diff --git a/jupyter_server/tests/extension/test_handler.py b/jupyter_server/tests/extension/test_handler.py index 7084c0804c..d920c66bf7 100644 --- a/jupyter_server/tests/extension/test_handler.py +++ b/jupyter_server/tests/extension/test_handler.py @@ -4,112 +4,82 @@ @pytest.fixture def jp_server_config(jp_template_dir): return { - "ServerApp": { - "jpserver_extensions": { - "jupyter_server.tests.extension.mockextensions": True - } - }, - "MockExtensionApp": { - "template_paths": [ - str(jp_template_dir) - ] - } - } + "ServerApp": { + "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True} + }, + "MockExtensionApp": {"template_paths": [str(jp_template_dir)]}, + } async def test_handler(jp_fetch): - r = await jp_fetch( - 'mock', - method='GET' - ) + r = await jp_fetch("mock", method="GET") assert r.code == 200 - assert r.body.decode() == 'mock trait' + assert r.body.decode() == "mock trait" async def test_handler_template(jp_fetch, mock_template): - r = await jp_fetch( - 'mock_template', - method='GET' - ) + r = await jp_fetch("mock_template", method="GET") assert r.code == 200 @pytest.mark.parametrize( - 'jp_server_config', + "jp_server_config", [ { "ServerApp": { - "jpserver_extensions": { - "jupyter_server.tests.extension.mockextensions": True - } + "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True} }, "MockExtensionApp": { # Change a trait in the MockExtensionApp using # the following config value. "mock_trait": "test mock trait" - } + }, } - ] + ], ) async def test_handler_setting(jp_fetch, jp_server_config): # Test that the extension trait was picked up by the webapp. - r = await jp_fetch( - 'mock', - method='GET' - ) + r = await jp_fetch("mock", method="GET") assert r.code == 200 - assert r.body.decode() == 'test mock trait' + assert r.body.decode() == "test mock trait" -@pytest.mark.parametrize( - 'jp_argv', (['--MockExtensionApp.mock_trait=test mock trait'],) -) +@pytest.mark.parametrize("jp_argv", (["--MockExtensionApp.mock_trait=test mock trait"],)) async def test_handler_argv(jp_fetch, jp_argv): # Test that the extension trait was picked up by the webapp. - r = await jp_fetch( - 'mock', - method='GET' - ) + r = await jp_fetch("mock", method="GET") assert r.code == 200 - assert r.body.decode() == 'test mock trait' + assert r.body.decode() == "test mock trait" @pytest.mark.parametrize( - 'jp_server_config,jp_base_url', + "jp_server_config,jp_base_url", [ ( { "ServerApp": { - "jpserver_extensions": { - "jupyter_server.tests.extension.mockextensions": True - }, + "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True}, # Move extension handlers behind a url prefix - "base_url": "test_prefix" + "base_url": "test_prefix", }, "MockExtensionApp": { # Change a trait in the MockExtensionApp using # the following config value. 
"mock_trait": "test mock trait" - } + }, }, - '/test_prefix/' + "/test_prefix/", ) - ] + ], ) async def test_base_url(jp_fetch, jp_server_config, jp_base_url): # Test that the extension's handlers were properly prefixed - r = await jp_fetch( - 'mock', - method='GET' - ) + r = await jp_fetch("mock", method="GET") assert r.code == 200 - assert r.body.decode() == 'test mock trait' + assert r.body.decode() == "test mock trait" # Test that the static namespace was prefixed by base_url - r = await jp_fetch( - 'static', 'mockextension', 'mock.txt', - method='GET' - ) + r = await jp_fetch("static", "mockextension", "mock.txt", method="GET") assert r.code == 200 body = r.body.decode() assert "mock static content" in body diff --git a/jupyter_server/tests/extension/test_launch.py b/jupyter_server/tests/extension/test_launch.py index 343ffa5cbe..e5cc12e7a7 100644 --- a/jupyter_server/tests/extension/test_launch.py +++ b/jupyter_server/tests/extension/test_launch.py @@ -1,14 +1,15 @@ """Test launching Jupyter Server Applications through as ExtensionApp launch_instance. """ -from pathlib import Path import os +import subprocess import sys import time +from binascii import hexlify +from pathlib import Path + import pytest -import subprocess import requests -from binascii import hexlify HERE = os.path.dirname(os.path.abspath(__file__)) @@ -26,9 +27,7 @@ def token(): @pytest.fixture def auth_header(token): - return { - 'Authorization': 'token %s' % token - } + return {"Authorization": "token %s" % token} def wait_up(url, interval=0.1, check=None): @@ -38,7 +37,7 @@ def wait_up(url, interval=0.1, check=None): except Exception: if check: assert check() - #print("waiting for %s" % url) + # print("waiting for %s" % url) time.sleep(interval) else: break @@ -47,7 +46,6 @@ def wait_up(url, interval=0.1, check=None): @pytest.fixture def launch_instance(request, port, token): def _run_in_subprocess(argv=[], add_token=True): - def _kill_extension_app(): try: process.terminate() @@ -59,17 +57,21 @@ def _kill_extension_app(): if add_token: f'--ServerApp.token="{token}"', - process = subprocess.Popen([ - sys.executable, '-m', - 'mockextensions.app', - f'--port={port}', - '--ip=127.0.0.1', - '--no-browser', - *argv, - ], cwd=HERE) + process = subprocess.Popen( + [ + sys.executable, + "-m", + "mockextensions.app", + f"--port={port}", + "--ip=127.0.0.1", + "--no-browser", + *argv, + ], + cwd=HERE, + ) request.addfinalizer(_kill_extension_app) - url = f'http://127.0.0.1:{port}' + url = f"http://127.0.0.1:{port}" wait_up(url, check=lambda: process.poll() is None) return process @@ -81,30 +83,29 @@ def fetch(port, auth_header): def _get(endpoint): url = f"http://127.0.0.1:{port}" + endpoint return requests.get(url, headers=auth_header) + return _get def test_launch_instance(launch_instance, fetch): launch_instance() - r = fetch('/mock') + r = fetch("/mock") assert r.status_code == 200 def test_base_url(launch_instance, fetch): - launch_instance(['--ServerApp.base_url=/foo']) + launch_instance(["--ServerApp.base_url=/foo"]) r = fetch("/foo/mock") assert r.status_code == 200 def test_token_file(launch_instance, fetch, token): - token_file = HERE / Path('token_file.txt') - os.environ['JUPYTER_TOKEN_FILE'] = str(token_file) - token_file.write_text(token, encoding='utf-8') + token_file = HERE / Path("token_file.txt") + os.environ["JUPYTER_TOKEN_FILE"] = str(token_file) + token_file.write_text(token, encoding="utf-8") launch_instance(add_token=False) r = fetch("/mock") - del os.environ['JUPYTER_TOKEN_FILE'] + del 
os.environ["JUPYTER_TOKEN_FILE"] token_file.unlink() assert r.status_code == 200 - - diff --git a/jupyter_server/tests/extension/test_manager.py b/jupyter_server/tests/extension/test_manager.py index 310a058fdc..847e0dbb43 100644 --- a/jupyter_server/tests/extension/test_manager.py +++ b/jupyter_server/tests/extension/test_manager.py @@ -1,16 +1,13 @@ import os import pytest - from jupyter_core.paths import jupyter_config_path -from jupyter_server.extension.manager import ( - ExtensionPoint, - ExtensionPackage, - ExtensionManager, - ExtensionMetadataError, - ExtensionModuleNotFound -) +from jupyter_server.extension.manager import ExtensionManager +from jupyter_server.extension.manager import ExtensionMetadataError +from jupyter_server.extension.manager import ExtensionModuleNotFound +from jupyter_server.extension.manager import ExtensionPackage +from jupyter_server.extension.manager import ExtensionPoint # Use ServerApps environment because it monkeypatches # jupyter_core.paths and provides a config directory @@ -60,7 +57,7 @@ def test_extension_package_api(): path1 = metadata_list[0] app = path1["app"] - e = ExtensionPackage(name='jupyter_server.tests.extension.mockextensions') + e = ExtensionPackage(name="jupyter_server.tests.extension.mockextensions") e.extension_points assert hasattr(e, "extension_points") assert len(e.extension_points) == len(metadata_list) @@ -78,12 +75,10 @@ def _normalize_path(path_list): def test_extension_manager_api(): - jpserver_extensions = { - "jupyter_server.tests.extension.mockextensions": True - } + jpserver_extensions = {"jupyter_server.tests.extension.mockextensions": True} manager = ExtensionManager() assert manager.config_manager - expected = _normalize_path(os.path.join(jupyter_config_path()[0], 'serverconfig')) + expected = _normalize_path(os.path.join(jupyter_config_path()[0], "serverconfig")) assert _normalize_path(manager.config_manager.read_config_path[0]) == expected manager.from_jpserver_extensions(jpserver_extensions) assert len(manager.extensions) == 1 diff --git a/jupyter_server/tests/extension/test_serverextension.py b/jupyter_server/tests/extension/test_serverextension.py index 6ee78684c8..0e7ed45893 100644 --- a/jupyter_server/tests/extension/test_serverextension.py +++ b/jupyter_server/tests/extension/test_serverextension.py @@ -1,12 +1,11 @@ -import pytest from collections import OrderedDict + +import pytest from traitlets.tests.utils import check_help_all_output -from jupyter_server.extension.serverextension import ( - toggle_server_extension_python, - _get_config_dir -) from jupyter_server.config_manager import BaseJSONConfigManager +from jupyter_server.extension.serverextension import _get_config_dir +from jupyter_server.extension.serverextension import toggle_server_extension_python # Use ServerApps environment because it monkeypatches @@ -16,11 +15,11 @@ def test_help_output(): - check_help_all_output('jupyter_server.extension.serverextension') - check_help_all_output('jupyter_server.extension.serverextension', ['enable']) - check_help_all_output('jupyter_server.extension.serverextension', ['disable']) - check_help_all_output('jupyter_server.extension.serverextension', ['install']) - check_help_all_output('jupyter_server.extension.serverextension', ['uninstall']) + check_help_all_output("jupyter_server.extension.serverextension") + check_help_all_output("jupyter_server.extension.serverextension", ["enable"]) + check_help_all_output("jupyter_server.extension.serverextension", ["disable"]) + 
check_help_all_output("jupyter_server.extension.serverextension", ["install"])
+    check_help_all_output("jupyter_server.extension.serverextension", ["uninstall"])
 
 
 def get_config(sys_prefix=True):
@@ -30,83 +29,70 @@ def get_config(sys_prefix=True):
 
 
 def test_enable(jp_env_config_path, jp_extension_environ):
-    toggle_server_extension_python('mock1', True)
+    toggle_server_extension_python("mock1", True)
     config = get_config()
-    assert config['mock1']
+    assert config["mock1"]
 
 
 def test_disable(jp_env_config_path, jp_extension_environ):
-    toggle_server_extension_python('mock1', True)
-    toggle_server_extension_python('mock1', False)
+    toggle_server_extension_python("mock1", True)
+    toggle_server_extension_python("mock1", False)
 
     config = get_config()
-    assert not config['mock1']
+    assert not config["mock1"]
 
 
-def test_merge_config(
-    jp_env_config_path,
-    jp_configurable_serverapp,
-    jp_extension_environ
-):
+def test_merge_config(jp_env_config_path, jp_configurable_serverapp, jp_extension_environ):
     # Toggle each extension module with a JSON config file
     # at the sys-prefix config dir.
     toggle_server_extension_python(
-        'jupyter_server.tests.extension.mockextensions.mockext_sys',
-        enabled=True,
-        sys_prefix=True
+        "jupyter_server.tests.extension.mockextensions.mockext_sys", enabled=True, sys_prefix=True
     )
     toggle_server_extension_python(
-        'jupyter_server.tests.extension.mockextensions.mockext_user',
-        enabled=True,
-        user=True
+        "jupyter_server.tests.extension.mockextensions.mockext_user", enabled=True, user=True
     )
     # Write this configuration in two places, sys-prefix and user.
     # sys-prefix supersedes user, so the extension should be disabled
     # when these two configs merge.
     toggle_server_extension_python(
-        'jupyter_server.tests.extension.mockextensions.mockext_both',
-        enabled=True,
-        sys_prefix=True
+        "jupyter_server.tests.extension.mockextensions.mockext_both", enabled=True, sys_prefix=True
     )
     toggle_server_extension_python(
-        'jupyter_server.tests.extension.mockextensions.mockext_both',
-        enabled=False,
-        user=True
+        "jupyter_server.tests.extension.mockextensions.mockext_both", enabled=False, user=True
     )
 
     arg = "--ServerApp.jpserver_extensions={{'{mockext_py}': True}}".format(
-        mockext_py='jupyter_server.tests.extension.mockextensions.mockext_py'
+        mockext_py="jupyter_server.tests.extension.mockextensions.mockext_py"
     )
 
     # Enable the last extension, mockext_py, using the CLI interface.
-    app = jp_configurable_serverapp(
-        config_dir=str(jp_env_config_path),
-        argv=[arg]
-    )
+    app = jp_configurable_serverapp(config_dir=str(jp_env_config_path), argv=[arg])
     # Verify that extensions are enabled and merged in proper order.
     extensions = app.jpserver_extensions
-    assert extensions['jupyter_server.tests.extension.mockextensions.mockext_user']
-    assert extensions['jupyter_server.tests.extension.mockextensions.mockext_sys']
-    assert extensions['jupyter_server.tests.extension.mockextensions.mockext_py']
+    assert extensions["jupyter_server.tests.extension.mockextensions.mockext_user"]
+    assert extensions["jupyter_server.tests.extension.mockextensions.mockext_sys"]
+    assert extensions["jupyter_server.tests.extension.mockextensions.mockext_py"]
     # Merging should cause this extension to be disabled.
-    assert not extensions['jupyter_server.tests.extension.mockextensions.mockext_both']
+    assert not extensions["jupyter_server.tests.extension.mockextensions.mockext_both"]
 
 
 @pytest.mark.parametrize(
-    'jp_server_config',
+    "jp_server_config",
     [
         {
             "ServerApp": {
-                "jpserver_extensions": OrderedDict([
-                    ('jupyter_server.tests.extension.mockextensions.mock2', True),
-                    ('jupyter_server.tests.extension.mockextensions.mock1', True)
-                ])
+                "jpserver_extensions": OrderedDict(
+                    [
+                        ("jupyter_server.tests.extension.mockextensions.mock2", True),
+                        ("jupyter_server.tests.extension.mockextensions.mock1", True),
+                    ]
+                )
             }
         }
-    ]
+    ],
 )
 def test_load_ordered(jp_serverapp, jp_server_config):
     assert jp_serverapp.mockII is True, "Mock II should have been loaded"
     assert jp_serverapp.mockI is True, "Mock I should have been loaded"
-    assert jp_serverapp.mock_shared == 'II', "Mock II should be loaded after Mock I"
+    assert jp_serverapp.mock_shared == "II", "Mock II should be loaded after Mock I"
diff --git a/jupyter_server/tests/extension/test_utils.py b/jupyter_server/tests/extension/test_utils.py
index 7e228e5cac..425b6bad9e 100644
--- a/jupyter_server/tests/extension/test_utils.py
+++ b/jupyter_server/tests/extension/test_utils.py
@@ -1,4 +1,5 @@
 import pytest
+
 from jupyter_server.extension.utils import validate_extension
 
 
@@ -10,10 +11,10 @@
 def test_validate_extension():
     # enabled at sys level
-    assert validate_extension('jupyter_server.tests.extension.mockextensions.mockext_sys')
+    assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_sys")
     # enabled at sys, disabled at user
-    assert validate_extension('jupyter_server.tests.extension.mockextensions.mockext_both')
+    assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_both")
     # enabled at user
-    assert validate_extension('jupyter_server.tests.extension.mockextensions.mockext_user')
+    assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_user")
     # enabled at Python
-    assert validate_extension('jupyter_server.tests.extension.mockextensions.mockext_py')
\ No newline at end of file
+    assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_py")
diff --git a/jupyter_server/tests/nbconvert/test_handlers.py b/jupyter_server/tests/nbconvert/test_handlers.py
index 9b5dfce6aa..1343b397e5 100644
--- a/jupyter_server/tests/nbconvert/test_handlers.py
+++ b/jupyter_server/tests/nbconvert/test_handlers.py
@@ -1,147 +1,130 @@
 # coding: utf-8
 import json
-
-import tornado
-
-from nbformat import writes
-from nbformat.v4 import (
-    new_notebook, new_markdown_cell, new_code_cell, new_output,
-)
-
-from shutil import which
-
-
 from base64 import encodebytes
+from shutil import which
 
 import pytest
+import tornado
+from nbformat import writes
+from nbformat.v4 import new_code_cell
+from nbformat.v4 import new_markdown_cell
+from nbformat.v4 import new_notebook
+from nbformat.v4 import new_output
 
 from ..utils import expected_http_error
 
 
-png_green_pixel = encodebytes(b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00'
-b'\x00\x00\x00\x01\x00\x00\x00\x01\x08\x02\x00\x00\x00\x90wS\xde\x00\x00\x00\x0cIDAT'
-b'\x08\xd7c\x90\xfb\xcf\x00\x00\x02\\\x01\x1e.~d\x87\x00\x00\x00\x00IEND\xaeB`\x82'
-).decode('ascii')
+png_green_pixel = encodebytes(
+    b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00"
+    b"\x00\x00\x00\x01\x00\x00\x00\x01\x08\x02\x00\x00\x00\x90wS\xde\x00\x00\x00\x0cIDAT"
+    b"\x08\xd7c\x90\xfb\xcf\x00\x00\x02\\\x01\x1e.~d\x87\x00\x00\x00\x00IEND\xaeB`\x82"
+).decode("ascii")
 
 
 @pytest.fixture
 def 
notebook(jp_root_dir): # Build sub directory. - subdir = jp_root_dir / 'foo' - if not jp_root_dir.joinpath('foo').is_dir(): + subdir = jp_root_dir / "foo" + if not jp_root_dir.joinpath("foo").is_dir(): subdir.mkdir() # Build a notebook programmatically. nb = new_notebook() - nb.cells.append(new_markdown_cell(u'Created by test Âŗ')) - cc1 = new_code_cell(source=u'print(2*6)') - cc1.outputs.append(new_output(output_type="stream", text=u'12')) - cc1.outputs.append(new_output(output_type="execute_result", - data={'image/png' : png_green_pixel}, - execution_count=1, - )) + nb.cells.append(new_markdown_cell(u"Created by test Âŗ")) + cc1 = new_code_cell(source=u"print(2*6)") + cc1.outputs.append(new_output(output_type="stream", text=u"12")) + cc1.outputs.append( + new_output( + output_type="execute_result", + data={"image/png": png_green_pixel}, + execution_count=1, + ) + ) nb.cells.append(cc1) # Write file to tmp dir. - nbfile = subdir / 'testnb.ipynb' - nbfile.write_text(writes(nb, version=4), encoding='utf-8') + nbfile = subdir / "testnb.ipynb" + nbfile.write_text(writes(nb, version=4), encoding="utf-8") -pytestmark = pytest.mark.skipif(not which('pandoc'), reason="Command 'pandoc' is not available") +pytestmark = pytest.mark.skipif(not which("pandoc"), reason="Command 'pandoc' is not available") async def test_from_file(jp_fetch, notebook): r = await jp_fetch( - 'nbconvert', 'html', 'foo', 'testnb.ipynb', - method='GET', - params={'download': False} + "nbconvert", "html", "foo", "testnb.ipynb", method="GET", params={"download": False} ) assert r.code == 200 - assert 'text/html' in r.headers['Content-Type'] - assert 'Created by test' in r.body.decode() - assert 'print' in r.body.decode() + assert "text/html" in r.headers["Content-Type"] + assert "Created by test" in r.body.decode() + assert "print" in r.body.decode() r = await jp_fetch( - 'nbconvert', 'python', 'foo', 'testnb.ipynb', - method='GET', - params={'download': False} + "nbconvert", "python", "foo", "testnb.ipynb", method="GET", params={"download": False} ) assert r.code == 200 - assert 'text/x-python' in r.headers['Content-Type'] - assert 'print(2*6)' in r.body.decode() + assert "text/x-python" in r.headers["Content-Type"] + assert "print(2*6)" in r.body.decode() async def test_from_file_404(jp_fetch, notebook): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( - 'nbconvert', 'html', 'foo', 'thisdoesntexist.ipynb', - method='GET', - params={'download': False} + "nbconvert", + "html", + "foo", + "thisdoesntexist.ipynb", + method="GET", + params={"download": False}, ) assert expected_http_error(e, 404) async def test_from_file_download(jp_fetch, notebook): r = await jp_fetch( - 'nbconvert', 'python', 'foo', 'testnb.ipynb', - method='GET', - params={'download': True} + "nbconvert", "python", "foo", "testnb.ipynb", method="GET", params={"download": True} ) - content_disposition = r.headers['Content-Disposition'] - assert 'attachment' in content_disposition - assert 'testnb.py' in content_disposition + content_disposition = r.headers["Content-Disposition"] + assert "attachment" in content_disposition + assert "testnb.py" in content_disposition async def test_from_file_zip(jp_fetch, notebook): r = await jp_fetch( - 'nbconvert', 'latex', 'foo', 'testnb.ipynb', - method='GET', - params={'download': True} + "nbconvert", "latex", "foo", "testnb.ipynb", method="GET", params={"download": True} ) - assert 'application/zip' in r.headers['Content-Type'] - assert '.zip' in r.headers['Content-Disposition'] + assert 
"application/zip" in r.headers["Content-Type"] + assert ".zip" in r.headers["Content-Disposition"] async def test_from_post(jp_fetch, notebook): r = await jp_fetch( - 'api/contents/foo/testnb.ipynb', - method='GET', + "api/contents/foo/testnb.ipynb", + method="GET", ) nbmodel = json.loads(r.body.decode()) - r = await jp_fetch( - 'nbconvert', 'html', - method='POST', - body=json.dumps(nbmodel) - ) + r = await jp_fetch("nbconvert", "html", method="POST", body=json.dumps(nbmodel)) assert r.code == 200 - assert 'text/html' in r.headers['Content-Type'] - assert 'Created by test' in r.body.decode() - assert 'print' in r.body.decode() + assert "text/html" in r.headers["Content-Type"] + assert "Created by test" in r.body.decode() + assert "print" in r.body.decode() - r = await jp_fetch( - 'nbconvert', 'python', - method='POST', - body=json.dumps(nbmodel) - ) + r = await jp_fetch("nbconvert", "python", method="POST", body=json.dumps(nbmodel)) assert r.code == 200 - assert u'text/x-python' in r.headers['Content-Type'] - assert 'print(2*6)'in r.body.decode() + assert u"text/x-python" in r.headers["Content-Type"] + assert "print(2*6)" in r.body.decode() async def test_from_post_zip(jp_fetch, notebook): r = await jp_fetch( - 'api/contents/foo/testnb.ipynb', - method='GET', + "api/contents/foo/testnb.ipynb", + method="GET", ) nbmodel = json.loads(r.body.decode()) - r = await jp_fetch( - 'nbconvert', 'latex', - method='POST', - body=json.dumps(nbmodel) - ) - assert 'application/zip' in r.headers['Content-Type'] - assert '.zip' in r.headers['Content-Disposition'] + r = await jp_fetch("nbconvert", "latex", method="POST", body=json.dumps(nbmodel)) + assert "application/zip" in r.headers["Content-Type"] + assert ".zip" in r.headers["Content-Disposition"] diff --git a/jupyter_server/tests/services/config/test_api.py b/jupyter_server/tests/services/config/test_api.py index 9f6533603e..9d4a4b2f01 100644 --- a/jupyter_server/tests/services/config/test_api.py +++ b/jupyter_server/tests/services/config/test_api.py @@ -2,64 +2,49 @@ async def test_create_retrieve_config(jp_fetch): - sample = {'foo': 'bar', 'baz': 73} - response = await jp_fetch( - 'api', 'config', 'example', - method='PUT', - body=json.dumps(sample) - ) + sample = {"foo": "bar", "baz": 73} + response = await jp_fetch("api", "config", "example", method="PUT", body=json.dumps(sample)) assert response.code == 204 response2 = await jp_fetch( - 'api', 'config', 'example', - method='GET', + "api", + "config", + "example", + method="GET", ) assert response2.code == 200 assert json.loads(response2.body.decode()) == sample async def test_modify(jp_fetch): - sample = { - 'foo': 'bar', - 'baz': 73, - 'sub': {'a': 6, 'b': 7}, - 'sub2': {'c': 8} - } + sample = {"foo": "bar", "baz": 73, "sub": {"a": 6, "b": 7}, "sub2": {"c": 8}} modified_sample = { - 'foo': None, # should delete foo - 'baz': 75, - 'wib': [1,2,3], - 'sub': {'a': 8, 'b': None, 'd': 9}, - 'sub2': {'c': None} # should delete sub2 + "foo": None, # should delete foo + "baz": 75, + "wib": [1, 2, 3], + "sub": {"a": 8, "b": None, "d": 9}, + "sub2": {"c": None}, # should delete sub2 } - diff = { - 'baz': 75, - 'wib': [1,2,3], - 'sub': {'a': 8, 'd': 9} - } + diff = {"baz": 75, "wib": [1, 2, 3], "sub": {"a": 8, "d": 9}} - await jp_fetch( - 'api', 'config', 'example', - method='PUT', - body=json.dumps(sample) - ) + await jp_fetch("api", "config", "example", method="PUT", body=json.dumps(sample)) response2 = await jp_fetch( - 'api', 'config', 'example', - method='PATCH', - body=json.dumps(modified_sample) + 
"api", "config", "example", method="PATCH", body=json.dumps(modified_sample) ) assert response2.code == 200 assert json.loads(response2.body.decode()) == diff - + async def test_get_unknown(jp_fetch): response = await jp_fetch( - 'api', 'config', 'nonexistant', - method='GET', + "api", + "config", + "nonexistant", + method="GET", ) assert response.code == 200 assert json.loads(response.body.decode()) == {} diff --git a/jupyter_server/tests/services/contents/test_api.py b/jupyter_server/tests/services/contents/test_api.py index 4d91c74422..cd0eda5513 100644 --- a/jupyter_server/tests/services/contents/test_api.py +++ b/jupyter_server/tests/services/contents/test_api.py @@ -1,48 +1,50 @@ -import sys import json import pathlib -import pytest +import sys +from base64 import decodebytes +from base64 import encodebytes +import pytest import tornado - -from nbformat import writes, from_dict -from nbformat.v4 import ( - new_notebook, new_markdown_cell, -) - -from jupyter_server.utils import url_path_join - -from base64 import encodebytes, decodebytes +from nbformat import from_dict +from nbformat import writes +from nbformat.v4 import new_markdown_cell +from nbformat.v4 import new_notebook from ...utils import expected_http_error +from jupyter_server.utils import url_path_join def notebooks_only(dir_model): - return [nb for nb in dir_model['content'] if nb['type']=='notebook'] + return [nb for nb in dir_model["content"] if nb["type"] == "notebook"] + def dirs_only(dir_model): - return [x for x in dir_model['content'] if x['type']=='directory'] + return [x for x in dir_model["content"] if x["type"] == "directory"] dirs = [ - ('', 'inroot'), - ('Directory with spaces in', 'inspace'), - (u'unicodÊ', 'innonascii'), - ('foo', 'a'), - ('foo', 'b'), - ('foo', 'name with spaces'), - ('foo', u'unicodÊ'), - ('foo/bar', 'baz'), - ('ordering', 'A'), - ('ordering', 'b'), - ('ordering', 'C'), - (u'ÃĨ b', u'ç d'), + ("", "inroot"), + ("Directory with spaces in", "inspace"), + (u"unicodÊ", "innonascii"), + ("foo", "a"), + ("foo", "b"), + ("foo", "name with spaces"), + ("foo", u"unicodÊ"), + ("foo/bar", "baz"), + ("ordering", "A"), + ("ordering", "b"), + ("ordering", "C"), + (u"ÃĨ b", u"ç d"), ] @pytest.fixture(params=["FileContentsManager", "AsyncFileContentsManager"]) def jp_argv(request): - return ["--ServerApp.contents_manager_class=jupyter_server.services.contents.filemanager." + request.param] + return [ + "--ServerApp.contents_manager_class=jupyter_server.services.contents.filemanager." 
+ + request.param + ] @pytest.fixture @@ -54,9 +56,9 @@ def contents_dir(tmp_path, jp_serverapp): def contents(contents_dir): # Create files in temporary directory paths = { - 'notebooks': [], - 'textfiles': [], - 'blobs': [], + "notebooks": [], + "textfiles": [], + "blobs": [], } for d, name in dirs: p = contents_dir / d @@ -64,22 +66,22 @@ def contents(contents_dir): # Create a notebook nb = writes(new_notebook(), version=4) - nbname = p.joinpath('{}.ipynb'.format(name)) - nbname.write_text(nb, encoding='utf-8') - paths['notebooks'].append(nbname.relative_to(contents_dir)) + nbname = p.joinpath("{}.ipynb".format(name)) + nbname.write_text(nb, encoding="utf-8") + paths["notebooks"].append(nbname.relative_to(contents_dir)) # Create a text file - txt = '{} text file'.format(name) - txtname = p.joinpath('{}.txt'.format(name)) - txtname.write_text(txt, encoding='utf-8') - paths['textfiles'].append(txtname.relative_to(contents_dir)) + txt = "{} text file".format(name) + txtname = p.joinpath("{}.txt".format(name)) + txtname.write_text(txt, encoding="utf-8") + paths["textfiles"].append(txtname.relative_to(contents_dir)) # Create a random blob - blob = name.encode('utf-8') + b'\xFF' - blobname = p.joinpath('{}.blob'.format(name)) + blob = name.encode("utf-8") + b"\xFF" + blobname = p.joinpath("{}.blob".format(name)) blobname.write_bytes(blob) - paths['blobs'].append(blobname.relative_to(contents_dir)) - paths['all'] = list(paths.values()) + paths["blobs"].append(blobname.relative_to(contents_dir)) + paths["all"] = list(paths.values()) return paths @@ -88,435 +90,385 @@ def folders(): return list(set(item[0] for item in dirs)) -@pytest.mark.parametrize('path,name', dirs) +@pytest.mark.parametrize("path,name", dirs) async def test_list_notebooks(jp_fetch, contents, path, name): response = await jp_fetch( - 'api', 'contents', path, - method='GET', + "api", + "contents", + path, + method="GET", ) data = json.loads(response.body.decode()) nbs = notebooks_only(data) assert len(nbs) > 0 - assert name+'.ipynb' in [n['name'] for n in nbs] - assert url_path_join(path, name+'.ipynb') in [n['path'] for n in nbs] + assert name + ".ipynb" in [n["name"] for n in nbs] + assert url_path_join(path, name + ".ipynb") in [n["path"] for n in nbs] -@pytest.mark.parametrize('path,name', dirs) +@pytest.mark.parametrize("path,name", dirs) async def test_get_dir_no_contents(jp_fetch, contents, path, name): response = await jp_fetch( - 'api', 'contents', path, - method='GET', + "api", + "contents", + path, + method="GET", params=dict( - content='0', - ) + content="0", + ), ) model = json.loads(response.body.decode()) - assert model['path'] == path - assert model['type'] == 'directory' - assert 'content' in model - assert model['content'] is None + assert model["path"] == path + assert model["type"] == "directory" + assert "content" in model + assert model["content"] is None async def test_list_nonexistant_dir(jp_fetch, contents): with pytest.raises(tornado.httpclient.HTTPClientError): await jp_fetch( - 'api', 'contents', 'nonexistant', - method='GET', + "api", + "contents", + "nonexistant", + method="GET", ) -@pytest.mark.parametrize('path,name', dirs) +@pytest.mark.parametrize("path,name", dirs) async def test_get_nb_contents(jp_fetch, contents, path, name): - nbname = name+'.ipynb' - nbpath = (path + '/' + nbname).lstrip('/') - r = await jp_fetch( - 'api', 'contents', nbpath, - method='GET', - params=dict(content='1') - ) + nbname = name + ".ipynb" + nbpath = (path + "/" + nbname).lstrip("/") + r = await jp_fetch("api", 
"contents", nbpath, method="GET", params=dict(content="1")) model = json.loads(r.body.decode()) - assert model['name'] == nbname - assert model['path'] == nbpath - assert model['type'] == 'notebook' - assert 'content' in model - assert model['format'] == 'json' - assert 'metadata' in model['content'] - assert isinstance(model['content']['metadata'], dict) + assert model["name"] == nbname + assert model["path"] == nbpath + assert model["type"] == "notebook" + assert "content" in model + assert model["format"] == "json" + assert "metadata" in model["content"] + assert isinstance(model["content"]["metadata"], dict) -@pytest.mark.parametrize('path,name', dirs) +@pytest.mark.parametrize("path,name", dirs) async def test_get_nb_no_contents(jp_fetch, contents, path, name): - nbname = name+'.ipynb' - nbpath = (path + '/' + nbname).lstrip('/') - r = await jp_fetch( - 'api', 'contents', nbpath, - method='GET', - params=dict(content='0') - ) + nbname = name + ".ipynb" + nbpath = (path + "/" + nbname).lstrip("/") + r = await jp_fetch("api", "contents", nbpath, method="GET", params=dict(content="0")) model = json.loads(r.body.decode()) - assert model['name'] == nbname - assert model['path'] == nbpath - assert model['type'] == 'notebook' - assert 'content' in model - assert model['content'] is None + assert model["name"] == nbname + assert model["path"] == nbpath + assert model["type"] == "notebook" + assert "content" in model + assert model["content"] is None async def test_get_nb_invalid(contents_dir, jp_fetch, contents): nb = { - 'nbformat': 4, - 'metadata': {}, - 'cells': [{ - 'cell_type': 'wrong', - 'metadata': {}, - }], + "nbformat": 4, + "metadata": {}, + "cells": [ + { + "cell_type": "wrong", + "metadata": {}, + } + ], } - nbpath = u'ÃĨ b/Validate tÊst.ipynb' + nbpath = u"ÃĨ b/Validate tÊst.ipynb" (contents_dir / nbpath).write_text(json.dumps(nb)) r = await jp_fetch( - 'api', 'contents', nbpath, - method='GET', + "api", + "contents", + nbpath, + method="GET", ) model = json.loads(r.body.decode()) - assert model['path'] == nbpath - assert model['type'] == 'notebook' - assert 'content' in model - assert 'message' in model - assert 'validation failed' in model['message'].lower() + assert model["path"] == nbpath + assert model["type"] == "notebook" + assert "content" in model + assert "message" in model + assert "validation failed" in model["message"].lower() async def test_get_contents_no_such_file(jp_fetch): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( - 'api', 'contents', 'foo/q.ipynb', - method='GET', + "api", + "contents", + "foo/q.ipynb", + method="GET", ) assert e.value.code == 404 -@pytest.mark.parametrize('path,name', dirs) +@pytest.mark.parametrize("path,name", dirs) async def test_get_text_file_contents(jp_fetch, contents, path, name): - txtname = name+'.txt' - txtpath = (path + '/' + txtname).lstrip('/') - r = await jp_fetch( - 'api', 'contents', txtpath, - method='GET', - params=dict(content='1') - ) + txtname = name + ".txt" + txtpath = (path + "/" + txtname).lstrip("/") + r = await jp_fetch("api", "contents", txtpath, method="GET", params=dict(content="1")) model = json.loads(r.body.decode()) - assert model['name'] == txtname - assert model['path'] == txtpath - assert 'content' in model - assert model['format'] == 'text' - assert model['type'] == 'file' - assert model['content'] == '{} text file'.format(name) + assert model["name"] == txtname + assert model["path"] == txtpath + assert "content" in model + assert model["format"] == "text" + assert 
model["type"] == "file" + assert model["content"] == "{} text file".format(name) with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( - 'api', 'contents', 'foo/q.txt', - method='GET', + "api", + "contents", + "foo/q.txt", + method="GET", ) assert expected_http_error(e, 404) with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( - 'api', 'contents', 'foo/bar/baz.blob', - method='GET', - params=dict( - type='file', - format='text' - ) + "api", + "contents", + "foo/bar/baz.blob", + method="GET", + params=dict(type="file", format="text"), ) assert expected_http_error(e, 400) - -@pytest.mark.parametrize('path,name', dirs) +@pytest.mark.parametrize("path,name", dirs) async def test_get_binary_file_contents(jp_fetch, contents, path, name): - blobname = name+'.blob' - blobpath = (path + '/' + blobname).lstrip('/') - r = await jp_fetch( - 'api', 'contents', blobpath, - method='GET', - params=dict(content='1') - ) + blobname = name + ".blob" + blobpath = (path + "/" + blobname).lstrip("/") + r = await jp_fetch("api", "contents", blobpath, method="GET", params=dict(content="1")) model = json.loads(r.body.decode()) - assert model['name'] == blobname - assert model['path'] == blobpath - assert 'content' in model - assert model['format'] == 'base64' - assert model['type'] == 'file' - data_out = decodebytes(model['content'].encode('ascii')) - data_in = name.encode('utf-8') + b'\xFF' + assert model["name"] == blobname + assert model["path"] == blobpath + assert "content" in model + assert model["format"] == "base64" + assert model["type"] == "file" + data_out = decodebytes(model["content"].encode("ascii")) + data_in = name.encode("utf-8") + b"\xFF" assert data_in == data_out with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( - 'api', 'contents', 'foo/q.txt', - method='GET', + "api", + "contents", + "foo/q.txt", + method="GET", ) assert expected_http_error(e, 404) async def test_get_bad_type(jp_fetch, contents): with pytest.raises(tornado.httpclient.HTTPClientError) as e: - path = 'unicodÊ' - type = 'file' + path = "unicodÊ" + type = "file" await jp_fetch( - 'api', 'contents', path, - method='GET', - params=dict(type=type) # This should be a directory, and thus throw and error + "api", + "contents", + path, + method="GET", + params=dict(type=type), # This should be a directory, and thus throw and error ) - assert expected_http_error(e, 400, '%s is a directory, not a %s' % (path, type)) + assert expected_http_error(e, 400, "%s is a directory, not a %s" % (path, type)) with pytest.raises(tornado.httpclient.HTTPClientError) as e: - path = 'unicodÊ/innonascii.ipynb' - type = 'directory' + path = "unicodÊ/innonascii.ipynb" + type = "directory" await jp_fetch( - 'api', 'contents', path, - method='GET', - params=dict(type=type) # This should be a file, and thus throw and error + "api", + "contents", + path, + method="GET", + params=dict(type=type), # This should be a file, and thus throw and error ) - assert expected_http_error(e, 400, '%s is not a directory' % path) + assert expected_http_error(e, 400, "%s is not a directory" % path) + @pytest.fixture def _check_created(jp_base_url): - def _inner(r, contents_dir, path, name, type='notebook'): - fpath = path+'/'+name + def _inner(r, contents_dir, path, name, type="notebook"): + fpath = path + "/" + name assert r.code == 201 - location = jp_base_url + 'api/contents/' + tornado.escape.url_escape(fpath, plus=False) - assert r.headers['Location'] == location + location = jp_base_url + 
"api/contents/" + tornado.escape.url_escape(fpath, plus=False) + assert r.headers["Location"] == location model = json.loads(r.body.decode()) - assert model['name'] == name - assert model['path'] == fpath - assert model['type'] == type - path = contents_dir + '/' + fpath - if type == 'directory': + assert model["name"] == name + assert model["path"] == fpath + assert model["type"] == type + path = contents_dir + "/" + fpath + if type == "directory": assert pathlib.Path(path).is_dir() else: assert pathlib.Path(path).is_file() + return _inner + async def test_create_untitled(jp_fetch, contents, contents_dir, _check_created): - path = 'ÃĨ b' - name = 'Untitled.ipynb' - r = await jp_fetch( - 'api', 'contents', path, - method='POST', - body=json.dumps({'ext': '.ipynb'}) - ) - _check_created(r, str(contents_dir), path, name, type='notebook') + path = "ÃĨ b" + name = "Untitled.ipynb" + r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})) + _check_created(r, str(contents_dir), path, name, type="notebook") - name = 'Untitled1.ipynb' - r = await jp_fetch( - 'api', 'contents', path, - method='POST', - body=json.dumps({'ext': '.ipynb'}) - ) - _check_created(r, str(contents_dir), path, name, type='notebook') + name = "Untitled1.ipynb" + r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})) + _check_created(r, str(contents_dir), path, name, type="notebook") - path = 'foo/bar' - name = 'Untitled.ipynb' - r = await jp_fetch( - 'api', 'contents', path, - method='POST', - body=json.dumps({'ext': '.ipynb'}) - ) - _check_created(r, str(contents_dir), path, name, type='notebook') + path = "foo/bar" + name = "Untitled.ipynb" + r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})) + _check_created(r, str(contents_dir), path, name, type="notebook") async def test_create_untitled_txt(jp_fetch, contents, contents_dir, _check_created): - name = 'untitled.txt' - path = 'foo/bar' - r = await jp_fetch( - 'api', 'contents', path, - method='POST', - body=json.dumps({'ext': '.txt'}) - ) - _check_created(r, str(contents_dir), path, name, type='file') + name = "untitled.txt" + path = "foo/bar" + r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".txt"})) + _check_created(r, str(contents_dir), path, name, type="file") - r = await jp_fetch( - 'api', 'contents', path, name, - method='GET' - ) + r = await jp_fetch("api", "contents", path, name, method="GET") model = json.loads(r.body.decode()) - assert model['type'] == 'file' - assert model['format'] == 'text' - assert model['content'] == '' + assert model["type"] == "file" + assert model["format"] == "text" + assert model["content"] == "" async def test_upload(jp_fetch, contents, contents_dir, _check_created): nb = new_notebook() - nbmodel = {'content': nb, 'type': 'notebook'} - path = 'ÃĨ b' - name = 'Upload tÊst.ipynb' - r = await jp_fetch( - 'api', 'contents', path, name, - method='PUT', - body=json.dumps(nbmodel) - ) + nbmodel = {"content": nb, "type": "notebook"} + path = "ÃĨ b" + name = "Upload tÊst.ipynb" + r = await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(nbmodel)) _check_created(r, str(contents_dir), path, name) async def test_mkdir_untitled(jp_fetch, contents, contents_dir, _check_created): - name = 'Untitled Folder' - path = 'ÃĨ b' + name = "Untitled Folder" + path = "ÃĨ b" r = await jp_fetch( - 'api', 'contents', path, - method='POST', - body=json.dumps({'type': 'directory'}) + 
"api", "contents", path, method="POST", body=json.dumps({"type": "directory"}) ) - _check_created(r, str(contents_dir), path, name, type='directory') + _check_created(r, str(contents_dir), path, name, type="directory") - name = 'Untitled Folder 1' + name = "Untitled Folder 1" r = await jp_fetch( - 'api', 'contents', path, - method='POST', - body=json.dumps({'type': 'directory'}) + "api", "contents", path, method="POST", body=json.dumps({"type": "directory"}) ) - _check_created(r, str(contents_dir), path, name, type='directory') + _check_created(r, str(contents_dir), path, name, type="directory") - name = 'Untitled Folder' - path = 'foo/bar' + name = "Untitled Folder" + path = "foo/bar" r = await jp_fetch( - 'api', 'contents', path, - method='POST', - body=json.dumps({'type': 'directory'}) + "api", "contents", path, method="POST", body=json.dumps({"type": "directory"}) ) - _check_created(r, str(contents_dir), path, name, type='directory') + _check_created(r, str(contents_dir), path, name, type="directory") async def test_mkdir(jp_fetch, contents, contents_dir, _check_created): - name = 'New ∂ir' - path = 'ÃĨ b' + name = "New ∂ir" + path = "ÃĨ b" r = await jp_fetch( - 'api', 'contents', path, name, - method='PUT', - body=json.dumps({'type': 'directory'}) + "api", "contents", path, name, method="PUT", body=json.dumps({"type": "directory"}) ) - _check_created(r, str(contents_dir), path, name, type='directory') + _check_created(r, str(contents_dir), path, name, type="directory") async def test_mkdir_hidden_400(jp_fetch): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( - 'api', 'contents', 'ÃĨ b/.hidden', - method='PUT', - body=json.dumps({'type': 'directory'}) + "api", "contents", "ÃĨ b/.hidden", method="PUT", body=json.dumps({"type": "directory"}) ) assert expected_http_error(e, 400) async def test_upload_txt(jp_fetch, contents, contents_dir, _check_created): - body = 'Ãŧnicode tÊxt' + body = "Ãŧnicode tÊxt" model = { - 'content' : body, - 'format' : 'text', - 'type' : 'file', + "content": body, + "format": "text", + "type": "file", } - path = 'ÃĨ b' - name = 'Upload tÊst.txt' - await jp_fetch( - 'api', 'contents', path, name, - method='PUT', - body=json.dumps(model) - ) + path = "ÃĨ b" + name = "Upload tÊst.txt" + await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(model)) # check roundtrip - r = await jp_fetch( - 'api', 'contents', path, name, - method='GET' - ) + r = await jp_fetch("api", "contents", path, name, method="GET") model = json.loads(r.body.decode()) - assert model['type'] == 'file' - assert model['format'] == 'text' - assert model['path'] == path+'/'+name - assert model['content'] == body + assert model["type"] == "file" + assert model["format"] == "text" + assert model["path"] == path + "/" + name + assert model["content"] == body async def test_upload_b64(jp_fetch, contents, contents_dir, _check_created): - body = b'\xFFblob' - b64body = encodebytes(body).decode('ascii') + body = b"\xFFblob" + b64body = encodebytes(body).decode("ascii") model = { - 'content' : b64body, - 'format' : 'base64', - 'type' : 'file', + "content": b64body, + "format": "base64", + "type": "file", } - path = 'ÃĨ b' - name = 'Upload tÊst.blob' - await jp_fetch( - 'api', 'contents', path, name, - method='PUT', - body=json.dumps(model) - ) + path = "ÃĨ b" + name = "Upload tÊst.blob" + await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(model)) # check roundtrip - r = await jp_fetch( - 'api', 'contents', path, name, - method='GET' 
- ) + r = await jp_fetch("api", "contents", path, name, method="GET") model = json.loads(r.body.decode()) - assert model['type'] == 'file' - assert model['path'] == path+'/'+name - assert model['format'] == 'base64' - decoded = decodebytes(model['content'].encode('ascii')) + assert model["type"] == "file" + assert model["path"] == path + "/" + name + assert model["format"] == "base64" + decoded = decodebytes(model["content"].encode("ascii")) assert decoded == body async def test_copy(jp_fetch, contents, contents_dir, _check_created): - path = 'ÃĨ b' - name = 'ç d.ipynb' - copy = 'ç d-Copy1.ipynb' + path = "ÃĨ b" + name = "ç d.ipynb" + copy = "ç d-Copy1.ipynb" r = await jp_fetch( - 'api', 'contents', path, - method='POST', - body=json.dumps({'copy_from': path+'/'+name}) + "api", "contents", path, method="POST", body=json.dumps({"copy_from": path + "/" + name}) ) - _check_created(r, str(contents_dir), path, copy, type='notebook') + _check_created(r, str(contents_dir), path, copy, type="notebook") # Copy the same file name - copy2 = 'ç d-Copy2.ipynb' + copy2 = "ç d-Copy2.ipynb" r = await jp_fetch( - 'api', 'contents', path, - method='POST', - body=json.dumps({'copy_from': path+'/'+name}) + "api", "contents", path, method="POST", body=json.dumps({"copy_from": path + "/" + name}) ) - _check_created(r, str(contents_dir), path, copy2, type='notebook') + _check_created(r, str(contents_dir), path, copy2, type="notebook") # copy a copy. - copy3 = 'ç d-Copy3.ipynb' + copy3 = "ç d-Copy3.ipynb" r = await jp_fetch( - 'api', 'contents', path, - method='POST', - body=json.dumps({'copy_from': path+'/'+copy2}) + "api", "contents", path, method="POST", body=json.dumps({"copy_from": path + "/" + copy2}) ) - _check_created(r, str(contents_dir), path, copy3, type='notebook') + _check_created(r, str(contents_dir), path, copy3, type="notebook") async def test_copy_path(jp_fetch, contents, contents_dir, _check_created): - path1 = 'foo' - path2 = 'ÃĨ b' - name = 'a.ipynb' - copy = 'a-Copy1.ipynb' + path1 = "foo" + path2 = "ÃĨ b" + name = "a.ipynb" + copy = "a-Copy1.ipynb" r = await jp_fetch( - 'api', 'contents', path2, - method='POST', - body=json.dumps({'copy_from': path1+'/'+name}) + "api", "contents", path2, method="POST", body=json.dumps({"copy_from": path1 + "/" + name}) ) - _check_created(r, str(contents_dir), path2, name, type='notebook') + _check_created(r, str(contents_dir), path2, name, type="notebook") r = await jp_fetch( - 'api', 'contents', path2, - method='POST', - body=json.dumps({'copy_from': path1+'/'+name}) + "api", "contents", path2, method="POST", body=json.dumps({"copy_from": path1 + "/" + name}) ) - _check_created(r, str(contents_dir), path2, copy, type='notebook') + _check_created(r, str(contents_dir), path2, copy, type="notebook") async def test_copy_put_400(jp_fetch, contents, contents_dir, _check_created): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( - 'api', 'contents', 'ÃĨ b/cøpy.ipynb', - method='PUT', - body=json.dumps({'copy_from': 'ÃĨ b/ç d.ipynb'}) + "api", + "contents", + "ÃĨ b/cøpy.ipynb", + method="PUT", + body=json.dumps({"copy_from": "ÃĨ b/ç d.ipynb"}), ) assert expected_http_error(e, 400) @@ -524,344 +476,272 @@ async def test_copy_put_400(jp_fetch, contents, contents_dir, _check_created): async def test_copy_dir_400(jp_fetch, contents, contents_dir, _check_created): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( - 'api', 'contents', 'foo', - method='POST', - body=json.dumps({'copy_from': 'ÃĨ b'}) + "api", 
"contents", "foo", method="POST", body=json.dumps({"copy_from": "ÃĨ b"}) ) assert expected_http_error(e, 400) -@pytest.mark.parametrize('path,name', dirs) +@pytest.mark.parametrize("path,name", dirs) async def test_delete(jp_fetch, contents, contents_dir, path, name, _check_created): - nbname = name+'.ipynb' - nbpath = (path + '/' + nbname).lstrip('/') + nbname = name + ".ipynb" + nbpath = (path + "/" + nbname).lstrip("/") r = await jp_fetch( - 'api', 'contents', nbpath, - method='DELETE', + "api", + "contents", + nbpath, + method="DELETE", ) assert r.code == 204 async def test_delete_dirs(jp_fetch, contents, folders): # Iterate over folders - for name in sorted(folders + ['/'], key=len, reverse=True): - r = await jp_fetch( - 'api', 'contents', name, - method='GET' - ) + for name in sorted(folders + ["/"], key=len, reverse=True): + r = await jp_fetch("api", "contents", name, method="GET") # Get JSON blobs for each content. - listing = json.loads(r.body.decode())['content'] + listing = json.loads(r.body.decode())["content"] # Delete all content for model in listing: - await jp_fetch( - 'api', 'contents', model['path'], - method='DELETE' - ) + await jp_fetch("api", "contents", model["path"], method="DELETE") # Make sure all content has been deleted. - r = await jp_fetch( - 'api', 'contents', - method='GET' - ) + r = await jp_fetch("api", "contents", method="GET") model = json.loads(r.body.decode()) - assert model['content'] == [] + assert model["content"] == [] -@pytest.mark.skipif(sys.platform == 'win32', reason="Disabled deleting non-empty dirs on Windows") +@pytest.mark.skipif(sys.platform == "win32", reason="Disabled deleting non-empty dirs on Windows") async def test_delete_non_empty_dir(jp_fetch, contents): # Delete a folder - await jp_fetch( - 'api', 'contents', 'ÃĨ b', - method='DELETE' - ) + await jp_fetch("api", "contents", "ÃĨ b", method="DELETE") # Check that the folder was been deleted. 
with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await jp_fetch( - 'api', 'contents', 'ÃĨ b', - method='GET' - ) + await jp_fetch("api", "contents", "ÃĨ b", method="GET") assert expected_http_error(e, 404) async def test_rename(jp_fetch, jp_base_url, contents, contents_dir): - path = 'foo' - name = 'a.ipynb' - new_name = 'z.ipynb' + path = "foo" + name = "a.ipynb" + new_name = "z.ipynb" # Rename the file r = await jp_fetch( - 'api', 'contents', path, name, - method='PATCH', - body=json.dumps({'path': path+'/'+new_name}) + "api", + "contents", + path, + name, + method="PATCH", + body=json.dumps({"path": path + "/" + new_name}), ) - fpath = path+'/'+new_name + fpath = path + "/" + new_name assert r.code == 200 - location = url_path_join(jp_base_url, 'api/contents/', fpath) - assert r.headers['Location'] == location + location = url_path_join(jp_base_url, "api/contents/", fpath) + assert r.headers["Location"] == location model = json.loads(r.body.decode()) - assert model['name'] == new_name - assert model['path'] == fpath + assert model["name"] == new_name + assert model["path"] == fpath fpath = str(contents_dir / fpath) assert pathlib.Path(fpath).is_file() # Check that the files have changed - r = await jp_fetch( - 'api', 'contents', path, - method='GET' - ) + r = await jp_fetch("api", "contents", path, method="GET") listing = json.loads(r.body.decode()) - nbnames = [name['name'] for name in listing['content']] - assert 'z.ipynb' in nbnames - assert 'a.ipynb' not in nbnames + nbnames = [name["name"] for name in listing["content"]] + assert "z.ipynb" in nbnames + assert "a.ipynb" not in nbnames async def test_checkpoints_follow_file(jp_fetch, contents): - path = 'foo' - name = 'a.ipynb' + path = "foo" + name = "a.ipynb" # Read initial file. - r = await jp_fetch( - 'api', 'contents', path, name, - method='GET' - ) + r = await jp_fetch("api", "contents", path, name, method="GET") model = json.loads(r.body.decode()) # Create a checkpoint of initial state r = await jp_fetch( - 'api', 'contents', path, name, 'checkpoints', - method='POST', - allow_nonstandard_methods=True + "api", "contents", path, name, "checkpoints", method="POST", allow_nonstandard_methods=True ) cp1 = json.loads(r.body.decode()) # Modify file and save. 
- nbcontent = model['content'] + nbcontent = model["content"] nb = from_dict(nbcontent) - hcell = new_markdown_cell('Created by test') + hcell = new_markdown_cell("Created by test") nb.cells.append(hcell) - nbmodel = {'content': nb, 'type': 'notebook'} - await jp_fetch( - 'api', 'contents', path, name, - method='PUT', - body=json.dumps(nbmodel) - ) + nbmodel = {"content": nb, "type": "notebook"} + await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(nbmodel)) # List checkpoints r = await jp_fetch( - 'api', 'contents', path, name, 'checkpoints', - method='GET', + "api", + "contents", + path, + name, + "checkpoints", + method="GET", ) cps = json.loads(r.body.decode()) assert cps == [cp1] - r = await jp_fetch( - 'api', 'contents', path, name, - method='GET' - ) + r = await jp_fetch("api", "contents", path, name, method="GET") model = json.loads(r.body.decode()) - nbcontent = model['content'] + nbcontent = model["content"] nb = from_dict(nbcontent) assert nb.cells[0].source == "Created by test" async def test_rename_existing(jp_fetch, contents): with pytest.raises(tornado.httpclient.HTTPClientError) as e: - path = 'foo' - name = 'a.ipynb' - new_name = 'b.ipynb' + path = "foo" + name = "a.ipynb" + new_name = "b.ipynb" # Rename the file await jp_fetch( - 'api', 'contents', path, name, - method='PATCH', - body=json.dumps({'path': path+'/'+new_name}) + "api", + "contents", + path, + name, + method="PATCH", + body=json.dumps({"path": path + "/" + new_name}), ) assert expected_http_error(e, 409) async def test_save(jp_fetch, contents): - r = await jp_fetch( - 'api', 'contents', 'foo/a.ipynb', - method='GET' - ) + r = await jp_fetch("api", "contents", "foo/a.ipynb", method="GET") model = json.loads(r.body.decode()) - nbmodel = model['content'] + nbmodel = model["content"] nb = from_dict(nbmodel) - nb.cells.append(new_markdown_cell('Created by test Âŗ')) - nbmodel = {'content': nb, 'type': 'notebook'} - await jp_fetch( - 'api', 'contents', 'foo/a.ipynb', - method='PUT', - body=json.dumps(nbmodel) - ) + nb.cells.append(new_markdown_cell("Created by test Âŗ")) + nbmodel = {"content": nb, "type": "notebook"} + await jp_fetch("api", "contents", "foo/a.ipynb", method="PUT", body=json.dumps(nbmodel)) # Round trip. - r = await jp_fetch( - 'api', 'contents', 'foo/a.ipynb', - method='GET' - ) + r = await jp_fetch("api", "contents", "foo/a.ipynb", method="GET") model = json.loads(r.body.decode()) - newnb = from_dict(model['content']) - assert newnb.cells[0].source == 'Created by test Âŗ' + newnb = from_dict(model["content"]) + assert newnb.cells[0].source == "Created by test Âŗ" async def test_checkpoints(jp_fetch, contents): - path = 'foo/a.ipynb' - resp = await jp_fetch( - 'api', 'contents', path, - method='GET' - ) + path = "foo/a.ipynb" + resp = await jp_fetch("api", "contents", path, method="GET") model = json.loads(resp.body.decode()) r = await jp_fetch( - 'api', 'contents', path, 'checkpoints', - method='POST', - allow_nonstandard_methods=True + "api", "contents", path, "checkpoints", method="POST", allow_nonstandard_methods=True ) assert r.code == 201 cp1 = json.loads(r.body.decode()) - assert set(cp1) == {'id', 'last_modified'} - assert r.headers['Location'].split('/')[-1] == cp1['id'] + assert set(cp1) == {"id", "last_modified"} + assert r.headers["Location"].split("/")[-1] == cp1["id"] # Modify it. 
- nbcontent = model['content'] + nbcontent = model["content"] nb = from_dict(nbcontent) - hcell = new_markdown_cell('Created by test') + hcell = new_markdown_cell("Created by test") nb.cells.append(hcell) # Save it. - nbmodel = {'content': nb, 'type': 'notebook'} - await jp_fetch( - 'api', 'contents', path, - method='PUT', - body=json.dumps(nbmodel) - ) + nbmodel = {"content": nb, "type": "notebook"} + await jp_fetch("api", "contents", path, method="PUT", body=json.dumps(nbmodel)) # List checkpoints - r = await jp_fetch( - 'api', 'contents', path, 'checkpoints', - method='GET' - ) + r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") cps = json.loads(r.body.decode()) assert cps == [cp1] - r = await jp_fetch( - 'api', 'contents', path, - method='GET' - ) - nbcontent = json.loads(r.body.decode())['content'] + r = await jp_fetch("api", "contents", path, method="GET") + nbcontent = json.loads(r.body.decode())["content"] nb = from_dict(nbcontent) - assert nb.cells[0].source == 'Created by test' + assert nb.cells[0].source == "Created by test" # Restore Checkpoint cp1 r = await jp_fetch( - 'api', 'contents', path, 'checkpoints', cp1['id'], - method='POST', - allow_nonstandard_methods=True + "api", + "contents", + path, + "checkpoints", + cp1["id"], + method="POST", + allow_nonstandard_methods=True, ) assert r.code == 204 - r = await jp_fetch( - 'api', 'contents', path, - method='GET' - ) - nbcontent = json.loads(r.body.decode())['content'] + r = await jp_fetch("api", "contents", path, method="GET") + nbcontent = json.loads(r.body.decode())["content"] nb = from_dict(nbcontent) assert nb.cells == [] # Delete cp1 - r = await jp_fetch( - 'api', 'contents', path, 'checkpoints', cp1['id'], - method='DELETE' - ) + r = await jp_fetch("api", "contents", path, "checkpoints", cp1["id"], method="DELETE") assert r.code == 204 - r = await jp_fetch( - 'api', 'contents', path, 'checkpoints', - method='GET' - ) + r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") cps = json.loads(r.body.decode()) assert cps == [] async def test_file_checkpoints(jp_fetch, contents): - path = 'foo/a.txt' - resp = await jp_fetch( - 'api', 'contents', path, - method='GET' - ) - orig_content = json.loads(resp.body.decode())['content'] + path = "foo/a.txt" + resp = await jp_fetch("api", "contents", path, method="GET") + orig_content = json.loads(resp.body.decode())["content"] r = await jp_fetch( - 'api', 'contents', path, 'checkpoints', - method='POST', - allow_nonstandard_methods=True + "api", "contents", path, "checkpoints", method="POST", allow_nonstandard_methods=True ) assert r.code == 201 cp1 = json.loads(r.body.decode()) - assert set(cp1) == {'id', 'last_modified'} - assert r.headers['Location'].split('/')[-1] == cp1['id'] + assert set(cp1) == {"id", "last_modified"} + assert r.headers["Location"].split("/")[-1] == cp1["id"] # Modify it. - new_content = orig_content + '\nsecond line' + new_content = orig_content + "\nsecond line" model = { - 'content': new_content, - 'type': 'file', - 'format': 'text', + "content": new_content, + "type": "file", + "format": "text", } # Save it. 
- await jp_fetch( - 'api', 'contents', path, - method='PUT', - body=json.dumps(model) - ) + await jp_fetch("api", "contents", path, method="PUT", body=json.dumps(model)) # List checkpoints - r = await jp_fetch( - 'api', 'contents', path, 'checkpoints', - method='GET' - ) + r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") cps = json.loads(r.body.decode()) assert cps == [cp1] - r = await jp_fetch( - 'api', 'contents', path, - method='GET' - ) - content = json.loads(r.body.decode())['content'] + r = await jp_fetch("api", "contents", path, method="GET") + content = json.loads(r.body.decode())["content"] assert content == new_content # Restore Checkpoint cp1 r = await jp_fetch( - 'api', 'contents', path, 'checkpoints', cp1['id'], - method='POST', - allow_nonstandard_methods=True + "api", + "contents", + path, + "checkpoints", + cp1["id"], + method="POST", + allow_nonstandard_methods=True, ) assert r.code == 204 - r = await jp_fetch( - 'api', 'contents', path, - method='GET' - ) - restored_content = json.loads(r.body.decode())['content'] + r = await jp_fetch("api", "contents", path, method="GET") + restored_content = json.loads(r.body.decode())["content"] assert restored_content == orig_content # Delete cp1 - r = await jp_fetch( - 'api', 'contents', path, 'checkpoints', cp1['id'], - method='DELETE' - ) + r = await jp_fetch("api", "contents", path, "checkpoints", cp1["id"], method="DELETE") assert r.code == 204 - r = await jp_fetch( - 'api', 'contents', path, 'checkpoints', - method='GET' - ) + r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") cps = json.loads(r.body.decode()) assert cps == [] async def test_trust(jp_fetch, contents): # It should be able to trust a notebook that exists - for path in contents['notebooks']: + for path in contents["notebooks"]: r = await jp_fetch( - 'api', 'contents', str(path), 'trust', - method='POST', - allow_nonstandard_methods=True + "api", "contents", str(path), "trust", method="POST", allow_nonstandard_methods=True ) assert r.code == 201 diff --git a/jupyter_server/tests/services/contents/test_config.py b/jupyter_server/tests/services/contents/test_config.py index 7fb2289eaf..741c10c139 100644 --- a/jupyter_server/tests/services/contents/test_config.py +++ b/jupyter_server/tests/services/contents/test_config.py @@ -1,23 +1,27 @@ import pytest from jupyter_server.services.contents.checkpoints import AsyncCheckpoints -from jupyter_server.services.contents.filecheckpoints import AsyncGenericFileCheckpoints, GenericFileCheckpoints +from jupyter_server.services.contents.filecheckpoints import AsyncGenericFileCheckpoints +from jupyter_server.services.contents.filecheckpoints import GenericFileCheckpoints from jupyter_server.services.contents.manager import AsyncContentsManager @pytest.fixture(params=[AsyncGenericFileCheckpoints, GenericFileCheckpoints]) def jp_server_config(request): - return {'FileContentsManager': {'checkpoints_class': request.param}} + return {"FileContentsManager": {"checkpoints_class": request.param}} def test_config_did_something(jp_server_config, jp_serverapp): - assert isinstance(jp_serverapp.contents_manager.checkpoints, - jp_server_config['FileContentsManager']['checkpoints_class']) + assert isinstance( + jp_serverapp.contents_manager.checkpoints, + jp_server_config["FileContentsManager"]["checkpoints_class"], + ) async def test_async_contents_manager(jp_configurable_serverapp): - config = {'ContentsManager': {'checkpoints_class': AsyncCheckpoints}} - argv = 
['--ServerApp.contents_manager_class=jupyter_server.services.contents.manager.AsyncContentsManager'] + config = {"ContentsManager": {"checkpoints_class": AsyncCheckpoints}} + argv = [ + "--ServerApp.contents_manager_class=jupyter_server.services.contents.manager.AsyncContentsManager" + ] app = jp_configurable_serverapp(config=config, argv=argv) assert isinstance(app.contents_manager, AsyncContentsManager) - diff --git a/jupyter_server/tests/services/contents/test_fileio.py b/jupyter_server/tests/services/contents/test_fileio.py index 9a5f6fe659..e837be717e 100644 --- a/jupyter_server/tests/services/contents/test_fileio.py +++ b/jupyter_server/tests/services/contents/test_fileio.py @@ -1,10 +1,11 @@ +import functools import io import os import stat -import functools +import sys + import decorator import pytest -import sys from ipython_genutils.testing.decorators import skip_win32 as _skip_win32 from jupyter_server.services.contents.fileio import atomic_writing @@ -17,6 +18,7 @@ def skip_win32(f): def inner(f, *args, **kwargs): decorated_f = _skip_win32(f) return decorated_f(*args, **kwargs) + return decorator.decorator(inner, f) @@ -24,16 +26,17 @@ def inner(f, *args, **kwargs): def test_atomic_writing(tmp_path): - class CustomExc(Exception): pass + class CustomExc(Exception): + pass - f1 = tmp_path / 'penguin' - f1.write_text('Before') + f1 = tmp_path / "penguin" + f1.write_text("Before") - if os.name != 'nt': + if os.name != "nt": os.chmod(str(f1), 0o701) orig_mode = stat.S_IMODE(os.stat(str(f1)).st_mode) - f2 = tmp_path / 'flamingo' + f2 = tmp_path / "flamingo" try: os.symlink(str(f1), str(f2)) have_symlink = True @@ -45,29 +48,29 @@ class CustomExc(Exception): pass with pytest.raises(CustomExc): with atomic_writing(str(f1)) as f: - f.write('Failing write') + f.write("Failing write") raise CustomExc - with io.open(str(f1), 'r') as f: - assert f.read() == 'Before' - + with io.open(str(f1), "r") as f: + assert f.read() == "Before" + with atomic_writing(str(f1)) as f: - f.write('Overwritten') + f.write("Overwritten") - with io.open(str(f1), 'r') as f: - assert f.read() == 'Overwritten' + with io.open(str(f1), "r") as f: + assert f.read() == "Overwritten" - if os.name != 'nt': + if os.name != "nt": mode = stat.S_IMODE(os.stat(str(f1)).st_mode) assert mode == orig_mode if have_symlink: # Check that writing over a file preserves a symlink with atomic_writing(str(f2)) as f: - f.write(u'written from symlink') - - with io.open(str(f1), 'r') as f: - assert f.read() == 'written from symlink' + f.write(u"written from symlink") + + with io.open(str(f1), "r") as f: + assert f.read() == "written from symlink" @pytest.fixture @@ -79,58 +82,58 @@ def handle_umask(): os.umask(umask) -@pytest.mark.skipif(sys.platform.startswith('win'), reason="Windows") +@pytest.mark.skipif(sys.platform.startswith("win"), reason="Windows") def test_atomic_writing_umask(handle_umask, tmp_path): os.umask(0o022) - f1 = str(tmp_path / '1') + f1 = str(tmp_path / "1") with atomic_writing(f1) as f: - f.write('1') + f.write("1") mode = stat.S_IMODE(os.stat(f1).st_mode) assert mode == 0o644 os.umask(0o057) - f2 = str(tmp_path / '2') + f2 = str(tmp_path / "2") with atomic_writing(f2) as f: - f.write('2') + f.write("2") mode = stat.S_IMODE(os.stat(f2).st_mode) assert mode == 0o620 def test_atomic_writing_newlines(tmp_path): - path = str(tmp_path / 'testfile') + path = str(tmp_path / "testfile") - lf = u'a\nb\nc\n' - plat = lf.replace(u'\n', os.linesep) - crlf = lf.replace(u'\n', u'\r\n') + lf = u"a\nb\nc\n" + plat = lf.replace(u"\n", 
os.linesep) + crlf = lf.replace(u"\n", u"\r\n") # test default - with io.open(path, 'w') as f: + with io.open(path, "w") as f: f.write(lf) - with io.open(path, 'r', newline='') as f: + with io.open(path, "r", newline="") as f: read = f.read() assert read == plat # test newline=LF - with io.open(path, 'w', newline='\n') as f: + with io.open(path, "w", newline="\n") as f: f.write(lf) - with io.open(path, 'r', newline='') as f: + with io.open(path, "r", newline="") as f: read = f.read() assert read == lf # test newline=CRLF - with atomic_writing(str(path), newline='\r\n') as f: + with atomic_writing(str(path), newline="\r\n") as f: f.write(lf) - with io.open(path, 'r', newline='') as f: + with io.open(path, "r", newline="") as f: read = f.read() assert read == crlf # test newline=no convert - text = u'crlf\r\ncr\rlf\n' - with atomic_writing(str(path), newline='') as f: + text = u"crlf\r\ncr\rlf\n" + with atomic_writing(str(path), newline="") as f: f.write(text) - with io.open(path, 'r', newline='') as f: + with io.open(path, "r", newline="") as f: read = f.read() - assert read == text \ No newline at end of file + assert read == text diff --git a/jupyter_server/tests/services/contents/test_largefilemanager.py b/jupyter_server/tests/services/contents/test_largefilemanager.py index bd3dfdd7a7..89103e232f 100644 --- a/jupyter_server/tests/services/contents/test_largefilemanager.py +++ b/jupyter_server/tests/services/contents/test_largefilemanager.py @@ -1,9 +1,10 @@ import pytest import tornado -from jupyter_server.services.contents.largefilemanager import AsyncLargeFileManager, LargeFileManager -from jupyter_server.utils import ensure_async from ...utils import expected_http_error +from jupyter_server.services.contents.largefilemanager import AsyncLargeFileManager +from jupyter_server.services.contents.largefilemanager import LargeFileManager +from jupyter_server.utils import ensure_async @pytest.fixture(params=[LargeFileManager, AsyncLargeFileManager]) @@ -15,83 +16,88 @@ def jp_large_contents_manager(request, tmp_path): async def test_save(jp_large_contents_manager): cm = jp_large_contents_manager - model = await ensure_async(cm.new_untitled(type='notebook')) - name = model['name'] - path = model['path'] + model = await ensure_async(cm.new_untitled(type="notebook")) + name = model["name"] + path = model["path"] # Get the model with 'content' full_model = await ensure_async(cm.get(path)) # Save the notebook model = await ensure_async(cm.save(full_model, path)) assert isinstance(model, dict) - assert 'name' in model - assert 'path' in model - assert model['name'] == name - assert model['path'] == path + assert "name" in model + assert "path" in model + assert model["name"] == name + assert model["path"] == path @pytest.mark.parametrize( - 'model,err_message', + "model,err_message", [ ( - {'name': 'test', 'path': 'test', 'chunk': 1}, - 'HTTP 400: Bad Request (No file type provided)' + {"name": "test", "path": "test", "chunk": 1}, + "HTTP 400: Bad Request (No file type provided)", ), ( - {'name': 'test', 'path': 'test', 'chunk': 1, 'type': 'notebook'}, - 'HTTP 400: Bad Request (File type "notebook" is not supported for large file transfer)' + {"name": "test", "path": "test", "chunk": 1, "type": "notebook"}, + 'HTTP 400: Bad Request (File type "notebook" is not supported for large file transfer)', ), ( - {'name': 'test', 'path': 'test', 'chunk': 1, 'type': 'file'}, - 'HTTP 400: Bad Request (No file content provided)', + {"name": "test", "path": "test", "chunk": 1, "type": "file"}, + "HTTP 400: Bad 
Request (No file content provided)", ), ( - {'name': 'test', 'path': 'test', 'chunk': 2, 'type': 'file', - 'content': u'test', 'format': 'json'}, - "HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')" - ) - ] + { + "name": "test", + "path": "test", + "chunk": 2, + "type": "file", + "content": u"test", + "format": "json", + }, + "HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')", + ), + ], ) async def test_bad_save(jp_large_contents_manager, model, err_message): with pytest.raises(tornado.web.HTTPError) as e: - await ensure_async(jp_large_contents_manager.save(model, model['path'])) + await ensure_async(jp_large_contents_manager.save(model, model["path"])) assert expected_http_error(e, 400, expected_message=err_message) async def test_saving_different_chunks(jp_large_contents_manager): cm = jp_large_contents_manager - model = {'name': 'test', 'path': 'test', 'type': 'file', - 'content': u'test==', 'format': 'text'} - name = model['name'] - path = model['path'] + model = {"name": "test", "path": "test", "type": "file", "content": u"test==", "format": "text"} + name = model["name"] + path = model["path"] await ensure_async(cm.save(model, path)) for chunk in (1, 2, -1): - for fm in ('text', 'base64'): + for fm in ("text", "base64"): full_model = await ensure_async(cm.get(path)) - full_model['chunk'] = chunk - full_model['format'] = fm + full_model["chunk"] = chunk + full_model["format"] = fm model_res = await ensure_async(cm.save(full_model, path)) assert isinstance(model_res, dict) - assert 'name' in model_res - assert 'path' in model_res - assert 'chunk' not in model_res - assert model_res['name'] == name - assert model_res['path'] == path + assert "name" in model_res + assert "path" in model_res + assert "chunk" not in model_res + assert model_res["name"] == name + assert model_res["path"] == path async def test_save_in_subdirectory(jp_large_contents_manager, tmp_path): cm = jp_large_contents_manager - sub_dir = tmp_path / 'foo' + sub_dir = tmp_path / "foo" sub_dir.mkdir() - model = await ensure_async(cm.new_untitled(path='/foo/', type='notebook')) - path = model['path'] + model = await ensure_async(cm.new_untitled(path="/foo/", type="notebook")) + path = model["path"] model = await ensure_async(cm.get(path)) # Change the name in the model for rename model = await ensure_async(cm.save(model, path)) assert isinstance(model, dict) - assert 'name' in model - assert 'path' in model - assert model['name'] == 'Untitled.ipynb' - assert model['path'] == 'foo/Untitled.ipynb' \ No newline at end of file + assert "name" in model + assert "path" in model + assert model["name"] == "Untitled.ipynb" + assert model["path"] == "foo/Untitled.ipynb" diff --git a/jupyter_server/tests/services/contents/test_manager.py b/jupyter_server/tests/services/contents/test_manager.py index 4c13cabee3..9063d22497 100644 --- a/jupyter_server/tests/services/contents/test_manager.py +++ b/jupyter_server/tests/services/contents/test_manager.py @@ -1,22 +1,27 @@ import os import sys import time -import pytest -from traitlets import TraitError -from tornado.web import HTTPError from itertools import combinations +import pytest from nbformat import v4 as nbformat +from tornado.web import HTTPError +from traitlets import TraitError -from jupyter_server.services.contents.filemanager import AsyncFileContentsManager, FileContentsManager -from jupyter_server.utils import ensure_async from ...utils import expected_http_error +from 
jupyter_server.services.contents.filemanager import AsyncFileContentsManager +from jupyter_server.services.contents.filemanager import FileContentsManager +from jupyter_server.utils import ensure_async -@pytest.fixture(params=[(FileContentsManager, True), - (FileContentsManager, False), - (AsyncFileContentsManager, True), - (AsyncFileContentsManager, False)]) +@pytest.fixture( + params=[ + (FileContentsManager, True), + (FileContentsManager, False), + (AsyncFileContentsManager, True), + (AsyncFileContentsManager, False), + ] +) def jp_contents_manager(request, tmp_path): contents_manager, use_atomic_writing = request.param return contents_manager(root_dir=str(tmp_path), use_atomic_writing=use_atomic_writing) @@ -26,6 +31,7 @@ def jp_contents_manager(request, tmp_path): def jp_file_contents_manager_class(request, tmp_path): return request.param + # -------------- Functions ---------------------------- @@ -52,20 +58,20 @@ def symlink(jp_contents_manager, src, dst): def add_code_cell(notebook): - output = nbformat.new_output("display_data", {'application/javascript': "alert('hi');"}) + output = nbformat.new_output("display_data", {"application/javascript": "alert('hi');"}) cell = nbformat.new_code_cell("print('hi')", outputs=[output]) notebook.cells.append(cell) async def new_notebook(jp_contents_manager): cm = jp_contents_manager - model = await ensure_async(cm.new_untitled(type='notebook')) - name = model['name'] - path = model['path'] + model = await ensure_async(cm.new_untitled(type="notebook")) + name = model["name"] + path = model["path"] full_model = await ensure_async(cm.get(path)) - nb = full_model['content'] - nb['metadata']['counter'] = int(1e6 * time.time()) + nb = full_model["content"] + nb["metadata"]["counter"] = int(1e6 * time.time()) add_code_cell(nb) await ensure_async(cm.save(full_model, path)) @@ -82,21 +88,22 @@ async def make_populated_dir(jp_contents_manager, api_path): async def check_populated_dir_files(jp_contents_manager, api_path): dir_model = await ensure_async(jp_contents_manager.get(api_path)) - assert dir_model['path'] == api_path - assert dir_model['type'] == "directory" + assert dir_model["path"] == api_path + assert dir_model["type"] == "directory" - for entry in dir_model['content']: - if entry['type'] == "directory": + for entry in dir_model["content"]: + if entry["type"] == "directory": continue - elif entry['type'] == "file": - assert entry['name'] == "file.txt" + elif entry["type"] == "file": + assert entry["name"] == "file.txt" complete_path = "/".join([api_path, "file.txt"]) assert entry["path"] == complete_path - elif entry['type'] == "notebook": - assert entry['name'] == "nb.ipynb" + elif entry["type"] == "notebook": + assert entry["name"] == "nb.ipynb" complete_path = "/".join([api_path, "nb.ipynb"]) assert entry["path"] == complete_path + # ----------------- Tests ---------------------------------- @@ -106,43 +113,44 @@ def test_root_dir(jp_file_contents_manager_class, tmp_path): def test_missing_root_dir(jp_file_contents_manager_class, tmp_path): - root = tmp_path / 'notebook' / 'dir' / 'is' / 'missing' + root = tmp_path / "notebook" / "dir" / "is" / "missing" with pytest.raises(TraitError): jp_file_contents_manager_class(root_dir=str(root)) def test_invalid_root_dir(jp_file_contents_manager_class, tmp_path): - temp_file = tmp_path / 'file.txt' - temp_file.write_text('') + temp_file = tmp_path / "file.txt" + temp_file.write_text("") with pytest.raises(TraitError): - jp_file_contents_manager_class(root_dir=str(temp_file)) + 
jp_file_contents_manager_class(root_dir=str(temp_file)) + def test_get_os_path(jp_file_contents_manager_class, tmp_path): fm = jp_file_contents_manager_class(root_dir=str(tmp_path)) - path = fm._get_os_path('/path/to/notebook/test.ipynb') - rel_path_list = '/path/to/notebook/test.ipynb'.split('/') + path = fm._get_os_path("/path/to/notebook/test.ipynb") + rel_path_list = "/path/to/notebook/test.ipynb".split("/") fs_path = os.path.join(fm.root_dir, *rel_path_list) assert path == fs_path fm = jp_file_contents_manager_class(root_dir=str(tmp_path)) - path = fm._get_os_path('test.ipynb') - fs_path = os.path.join(fm.root_dir, 'test.ipynb') + path = fm._get_os_path("test.ipynb") + fs_path = os.path.join(fm.root_dir, "test.ipynb") assert path == fs_path fm = jp_file_contents_manager_class(root_dir=str(tmp_path)) - path = fm._get_os_path('////test.ipynb') - fs_path = os.path.join(fm.root_dir, 'test.ipynb') + path = fm._get_os_path("////test.ipynb") + fs_path = os.path.join(fm.root_dir, "test.ipynb") assert path == fs_path def test_checkpoint_subdir(jp_file_contents_manager_class, tmp_path): - subd = 'sub ∂ir' - cp_name = 'test-cp.ipynb' + subd = "sub ∂ir" + cp_name = "test-cp.ipynb" fm = jp_file_contents_manager_class(root_dir=str(tmp_path)) tmp_path.joinpath(subd).mkdir() cpm = fm.checkpoints - cp_dir = cpm.checkpoint_path('cp', 'test.ipynb') - cp_subdir = cpm.checkpoint_path('cp', '/%s/test.ipynb' % subd) + cp_dir = cpm.checkpoint_path("cp", "test.ipynb") + cp_subdir = cpm.checkpoint_path("cp", "/%s/test.ipynb" % subd) assert cp_dir != cp_subdir assert cp_dir == os.path.join(str(tmp_path), cpm.checkpoint_dir, cp_name) @@ -151,158 +159,154 @@ async def test_bad_symlink(jp_file_contents_manager_class, tmp_path): td = str(tmp_path) cm = jp_file_contents_manager_class(root_dir=td) - path = 'test bad symlink' + path = "test bad symlink" _make_dir(cm, path) - file_model = await ensure_async(cm.new_untitled(path=path, ext='.txt')) + file_model = await ensure_async(cm.new_untitled(path=path, ext=".txt")) # create a broken symlink - symlink(cm, "target", '%s/%s' % (path, 'bad symlink')) + symlink(cm, "target", "%s/%s" % (path, "bad symlink")) model = await ensure_async(cm.get(path)) - contents = { - content['name']: content for content in model['content'] - } - assert 'untitled.txt' in contents - assert contents['untitled.txt'] == file_model - assert 'bad symlink' in contents + contents = {content["name"]: content for content in model["content"]} + assert "untitled.txt" in contents + assert contents["untitled.txt"] == file_model + assert "bad symlink" in contents -@pytest.mark.skipif( - sys.platform.startswith('win'), - reason="Windows doesn't detect symlink loops" -) +@pytest.mark.skipif(sys.platform.startswith("win"), reason="Windows doesn't detect symlink loops") async def test_recursive_symlink(jp_file_contents_manager_class, tmp_path): td = str(tmp_path) cm = jp_file_contents_manager_class(root_dir=td) - path = 'test recursive symlink' + path = "test recursive symlink" _make_dir(cm, path) - file_model = await ensure_async(cm.new_untitled(path=path, ext='.txt')) + file_model = await ensure_async(cm.new_untitled(path=path, ext=".txt")) # create recursive symlink - symlink(cm, '%s/%s' % (path, "recursive"), '%s/%s' % (path, "recursive")) + symlink(cm, "%s/%s" % (path, "recursive"), "%s/%s" % (path, "recursive")) model = await ensure_async(cm.get(path)) - contents = { - content['name']: content for content in model['content'] - } - assert 'untitled.txt' in contents - assert contents['untitled.txt'] == 
file_model + contents = {content["name"]: content for content in model["content"]} + assert "untitled.txt" in contents + assert contents["untitled.txt"] == file_model # recursive symlinks should not be shown in the contents manager - assert 'recursive' not in contents + assert "recursive" not in contents async def test_good_symlink(jp_file_contents_manager_class, tmp_path): td = str(tmp_path) cm = jp_file_contents_manager_class(root_dir=td) - parent = 'test good symlink' - name = 'good symlink' - path = '{0}/{1}'.format(parent, name) + parent = "test good symlink" + name = "good symlink" + path = "{0}/{1}".format(parent, name) _make_dir(cm, parent) - file_model = await ensure_async(cm.new(path=parent + '/zfoo.txt')) + file_model = await ensure_async(cm.new(path=parent + "/zfoo.txt")) # create a good symlink - symlink(cm, file_model['path'], path) + symlink(cm, file_model["path"], path) symlink_model = await ensure_async(cm.get(path, content=False)) dir_model = await ensure_async(cm.get(parent)) - assert sorted(dir_model['content'], key=lambda x: x['name']) == [symlink_model, file_model] + assert sorted(dir_model["content"], key=lambda x: x["name"]) == [symlink_model, file_model] -@pytest.mark.skipif( - sys.platform.startswith('win'), - reason="Can't test permissions on Windows" -) +@pytest.mark.skipif(sys.platform.startswith("win"), reason="Can't test permissions on Windows") async def test_403(jp_file_contents_manager_class, tmp_path): - if hasattr(os, 'getuid'): + if hasattr(os, "getuid"): if os.getuid() == 0: raise pytest.skip("Can't test permissions as root") td = str(tmp_path) cm = jp_file_contents_manager_class(root_dir=td) - model = await ensure_async(cm.new_untitled(type='file')) - os_path = cm._get_os_path(model['path']) + model = await ensure_async(cm.new_untitled(type="file")) + os_path = cm._get_os_path(model["path"]) os.chmod(os_path, 0o400) try: - with cm.open(os_path, 'w') as f: + with cm.open(os_path, "w") as f: f.write(u"don't care") except HTTPError as e: assert e.status_code == 403 + async def test_escape_root(jp_file_contents_manager_class, tmp_path): td = str(tmp_path) cm = jp_file_contents_manager_class(root_dir=td) # make foo, bar next to root - with open(os.path.join(cm.root_dir, '..', 'foo'), 'w') as f: - f.write('foo') - with open(os.path.join(cm.root_dir, '..', 'bar'), 'w') as f: - f.write('bar') + with open(os.path.join(cm.root_dir, "..", "foo"), "w") as f: + f.write("foo") + with open(os.path.join(cm.root_dir, "..", "bar"), "w") as f: + f.write("bar") with pytest.raises(HTTPError) as e: - await ensure_async(cm.get('..')) + await ensure_async(cm.get("..")) expected_http_error(e, 404) with pytest.raises(HTTPError) as e: - await ensure_async(cm.get('foo/../../../bar')) + await ensure_async(cm.get("foo/../../../bar")) expected_http_error(e, 404) with pytest.raises(HTTPError) as e: - await ensure_async(cm.delete('../foo')) + await ensure_async(cm.delete("../foo")) expected_http_error(e, 404) with pytest.raises(HTTPError) as e: - await ensure_async(cm.rename('../foo', '../bar')) + await ensure_async(cm.rename("../foo", "../bar")) expected_http_error(e, 404) with pytest.raises(HTTPError) as e: - await ensure_async(cm.save(model={ - 'type': 'file', - 'content': u'', - 'format': 'text', - }, path='../foo')) + await ensure_async( + cm.save( + model={ + "type": "file", + "content": u"", + "format": "text", + }, + path="../foo", + ) + ) expected_http_error(e, 404) async def test_new_untitled(jp_contents_manager): cm = jp_contents_manager # Test in root directory - model = 
await ensure_async(cm.new_untitled(type='notebook')) + model = await ensure_async(cm.new_untitled(type="notebook")) assert isinstance(model, dict) - assert 'name' in model - assert 'path' in model - assert 'type' in model - assert model['type'] == 'notebook' - assert model['name'] == 'Untitled.ipynb' - assert model['path'] == 'Untitled.ipynb' + assert "name" in model + assert "path" in model + assert "type" in model + assert model["type"] == "notebook" + assert model["name"] == "Untitled.ipynb" + assert model["path"] == "Untitled.ipynb" # Test in sub-directory - model = await ensure_async(cm.new_untitled(type='directory')) + model = await ensure_async(cm.new_untitled(type="directory")) assert isinstance(model, dict) - assert 'name' in model - assert 'path' in model - assert 'type' in model - assert model['type'] == 'directory' - assert model['name'] == 'Untitled Folder' - assert model['path'] == 'Untitled Folder' - sub_dir = model['path'] + assert "name" in model + assert "path" in model + assert "type" in model + assert model["type"] == "directory" + assert model["name"] == "Untitled Folder" + assert model["path"] == "Untitled Folder" + sub_dir = model["path"] model = await ensure_async(cm.new_untitled(path=sub_dir)) assert isinstance(model, dict) - assert 'name' in model - assert 'path' in model - assert 'type' in model - assert model['type'] == 'file' - assert model['name'] == 'untitled' - assert model['path'] == '%s/untitled' % sub_dir + assert "name" in model + assert "path" in model + assert "type" in model + assert model["type"] == "file" + assert model["name"] == "untitled" + assert model["path"] == "%s/untitled" % sub_dir # Test with a compound extension - model = await ensure_async(cm.new_untitled(path=sub_dir, ext='.foo.bar')) - assert model['name'] == 'untitled.foo.bar' - model = await ensure_async(cm.new_untitled(path=sub_dir, ext='.foo.bar')) - assert model['name'] == 'untitled1.foo.bar' + model = await ensure_async(cm.new_untitled(path=sub_dir, ext=".foo.bar")) + assert model["name"] == "untitled.foo.bar" + model = await ensure_async(cm.new_untitled(path=sub_dir, ext=".foo.bar")) + assert model["name"] == "untitled1.foo.bar" async def test_modified_date(jp_contents_manager): @@ -312,125 +316,124 @@ async def test_modified_date(jp_contents_manager): model = await ensure_async(cm.get(path)) # Add a cell and save. - add_code_cell(model['content']) + add_code_cell(model["content"]) await ensure_async(cm.save(model, path)) # Reload notebook and verify that last_modified incremented. saved = await ensure_async(cm.get(path)) - assert saved['last_modified'] >= model['last_modified'] + assert saved["last_modified"] >= model["last_modified"] # Move the notebook and verify that last_modified stayed the same. # (The frontend fires a warning if last_modified increases on the # renamed file.) 
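
The comment above states a real contract: a save may advance `last_modified`, but the rename that follows should not. On a plain filesystem backend a rename is an `os.rename`-style move of the directory entry, which leaves the file's mtime alone; a standalone sanity check of that assumption (independent of this patch, POSIX semantics):

    import os
    import pathlib
    import tempfile

    with tempfile.TemporaryDirectory() as td:
        src = pathlib.Path(td, "a.ipynb")
        src.write_text("{}")
        before = src.stat().st_mtime
        os.replace(src, pathlib.Path(td, "renamed.ipynb"))  # rename in place
        after = pathlib.Path(td, "renamed.ipynb").stat().st_mtime
        assert after == before  # mtime travels with the file on POSIX
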
- new_path = 'renamed.ipynb' + new_path = "renamed.ipynb" await ensure_async(cm.rename(path, new_path)) renamed = await ensure_async(cm.get(new_path)) - assert renamed['last_modified'] >= saved['last_modified'] + assert renamed["last_modified"] >= saved["last_modified"] async def test_get(jp_contents_manager): cm = jp_contents_manager # Create a notebook - model = await ensure_async(cm.new_untitled(type='notebook')) - name = model['name'] - path = model['path'] + model = await ensure_async(cm.new_untitled(type="notebook")) + name = model["name"] + path = model["path"] # Check that we 'get' on the notebook we just created model2 = await ensure_async(cm.get(path)) assert isinstance(model2, dict) - assert 'name' in model2 - assert 'path' in model2 - assert model['name'] == name - assert model['path'] == path + assert "name" in model2 + assert "path" in model2 + assert model["name"] == name + assert model["path"] == path - nb_as_file = await ensure_async(cm.get(path, content=True, type='file')) - assert nb_as_file['path'] == path - assert nb_as_file['type'] == 'file' - assert nb_as_file['format'] == 'text' - assert not isinstance(nb_as_file['content'], dict) + nb_as_file = await ensure_async(cm.get(path, content=True, type="file")) + assert nb_as_file["path"] == path + assert nb_as_file["type"] == "file" + assert nb_as_file["format"] == "text" + assert not isinstance(nb_as_file["content"], dict) - nb_as_bin_file = await ensure_async(cm.get(path, content=True, type='file', format='base64')) - assert nb_as_bin_file['format'] == 'base64' + nb_as_bin_file = await ensure_async(cm.get(path, content=True, type="file", format="base64")) + assert nb_as_bin_file["format"] == "base64" # Test in sub-directory - sub_dir = '/foo/' - _make_dir(cm, 'foo') - await ensure_async(cm.new_untitled(path=sub_dir, ext='.ipynb')) + sub_dir = "/foo/" + _make_dir(cm, "foo") + await ensure_async(cm.new_untitled(path=sub_dir, ext=".ipynb")) model2 = await ensure_async(cm.get(sub_dir + name)) assert isinstance(model2, dict) - assert 'name' in model2 - assert 'path' in model2 - assert 'content' in model2 - assert model2['name'] == 'Untitled.ipynb' - assert model2['path'] == '{0}/{1}'.format(sub_dir.strip('/'), name) - + assert "name" in model2 + assert "path" in model2 + assert "content" in model2 + assert model2["name"] == "Untitled.ipynb" + assert model2["path"] == "{0}/{1}".format(sub_dir.strip("/"), name) # Test with a regular file. - file_model_path = (await ensure_async(cm.new_untitled(path=sub_dir, ext='.txt')))['path'] + file_model_path = (await ensure_async(cm.new_untitled(path=sub_dir, ext=".txt")))["path"] file_model = await ensure_async(cm.get(file_model_path)) expected_model = { - 'content': u'', - 'format': u'text', - 'mimetype': u'text/plain', - 'name': u'untitled.txt', - 'path': u'foo/untitled.txt', - 'type': u'file', - 'writable': True, + "content": u"", + "format": u"text", + "mimetype": u"text/plain", + "name": u"untitled.txt", + "path": u"foo/untitled.txt", + "type": u"file", + "writable": True, } # Assert expected model is in file_model for key, value in expected_model.items(): assert file_model[key] == value - assert 'created' in file_model - assert 'last_modified' in file_model + assert "created" in file_model + assert "last_modified" in file_model # Create a sub-sub directory to test getting directory contents with a # subdir. 
- _make_dir(cm, 'foo/bar') - dirmodel = await ensure_async(cm.get('foo')) - assert dirmodel['type'] == 'directory' - assert isinstance(dirmodel['content'], list) - assert len(dirmodel['content']) == 3 - assert dirmodel['path'] == 'foo' - assert dirmodel['name'] == 'foo' + _make_dir(cm, "foo/bar") + dirmodel = await ensure_async(cm.get("foo")) + assert dirmodel["type"] == "directory" + assert isinstance(dirmodel["content"], list) + assert len(dirmodel["content"]) == 3 + assert dirmodel["path"] == "foo" + assert dirmodel["name"] == "foo" # Directory contents should match the contents of each individual entry # when requested with content=False. model2_no_content = await ensure_async(cm.get(sub_dir + name, content=False)) - file_model_no_content = await ensure_async(cm.get(u'foo/untitled.txt', content=False)) - sub_sub_dir_no_content = await ensure_async(cm.get('foo/bar', content=False)) - assert sub_sub_dir_no_content['path'] == 'foo/bar' - assert sub_sub_dir_no_content['name'] == 'bar' + file_model_no_content = await ensure_async(cm.get(u"foo/untitled.txt", content=False)) + sub_sub_dir_no_content = await ensure_async(cm.get("foo/bar", content=False)) + assert sub_sub_dir_no_content["path"] == "foo/bar" + assert sub_sub_dir_no_content["name"] == "bar" - for entry in dirmodel['content']: + for entry in dirmodel["content"]: # Order isn't guaranteed by the spec, so this is a hacky way of # verifying that all entries are matched. - if entry['path'] == sub_sub_dir_no_content['path']: + if entry["path"] == sub_sub_dir_no_content["path"]: assert entry == sub_sub_dir_no_content - elif entry['path'] == model2_no_content['path']: + elif entry["path"] == model2_no_content["path"]: assert entry == model2_no_content - elif entry['path'] == file_model_no_content['path']: + elif entry["path"] == file_model_no_content["path"]: assert entry == file_model_no_content else: assert False, "Unexpected directory entry: %s" % entry() with pytest.raises(HTTPError): - await ensure_async(cm.get('foo', type='file')) + await ensure_async(cm.get("foo", type="file")) async def test_update(jp_contents_manager): cm = jp_contents_manager # Create a notebook. 
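
Nearly every call in these tests goes through `ensure_async`, which is what lets a single test body drive both the sync and async contents managers from the parametrized fixture. Conceptually it awaits awaitables and passes plain values through; a rough sketch of that idea (the real helper lives in `jupyter_server.utils` and handles a few more cases):

    import asyncio
    import inspect

    async def ensure_async_sketch(value):
        # Await coroutines/awaitables; hand back synchronous results unchanged.
        if inspect.isawaitable(value):
            return await value
        return value

    async def demo():
        assert await ensure_async_sketch(42) == 42                   # sync path
        assert await ensure_async_sketch(asyncio.sleep(0, 7)) == 7   # async path

    asyncio.run(demo())
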
- model = await ensure_async(cm.new_untitled(type='notebook')) - name = model['name'] - path = model['path'] + model = await ensure_async(cm.new_untitled(type="notebook")) + name = model["name"] + path = model["path"] # Change the name in the model for rename - model['path'] = 'test.ipynb' + model["path"] = "test.ipynb" model = await ensure_async(cm.update(model, path)) assert isinstance(model, dict) - assert 'name' in model - assert 'path' in model - assert model['name'] == 'test.ipynb' + assert "name" in model + assert "path" in model + assert model["name"] == "test.ipynb" # Make sure the old name is gone with pytest.raises(HTTPError): @@ -438,20 +441,20 @@ async def test_update(jp_contents_manager): # Test in sub-directory # Create a directory and notebook in that directory - sub_dir = '/foo/' - _make_dir(cm, 'foo') - model = await ensure_async(cm.new_untitled(path=sub_dir, type='notebook')) - path = model['path'] + sub_dir = "/foo/" + _make_dir(cm, "foo") + model = await ensure_async(cm.new_untitled(path=sub_dir, type="notebook")) + path = model["path"] # Change the name in the model for rename - d = path.rsplit('/', 1)[0] - new_path = model['path'] = d + '/test_in_sub.ipynb' + d = path.rsplit("/", 1)[0] + new_path = model["path"] = d + "/test_in_sub.ipynb" model = await ensure_async(cm.update(model, path)) assert isinstance(model, dict) - assert 'name' in model - assert 'path' in model - assert model['name'] == 'test_in_sub.ipynb' - assert model['path'] == new_path + assert "name" in model + assert "path" in model + assert model["name"] == "test_in_sub.ipynb" + assert model["path"] == new_path # Make sure the old name is gone with pytest.raises(HTTPError): @@ -461,9 +464,9 @@ async def test_update(jp_contents_manager): async def test_save(jp_contents_manager): cm = jp_contents_manager # Create a notebook - model = await ensure_async(cm.new_untitled(type='notebook')) - name = model['name'] - path = model['path'] + model = await ensure_async(cm.new_untitled(type="notebook")) + name = model["name"] + path = model["path"] # Get the model with 'content' full_model = await ensure_async(cm.get(path)) @@ -471,26 +474,26 @@ async def test_save(jp_contents_manager): # Save the notebook model = await ensure_async(cm.save(full_model, path)) assert isinstance(model, dict) - assert 'name' in model - assert 'path' in model - assert model['name'] == name - assert model['path'] == path + assert "name" in model + assert "path" in model + assert model["name"] == name + assert model["path"] == path # Test in sub-directory # Create a directory and notebook in that directory - sub_dir = '/foo/' - _make_dir(cm, 'foo') - model = await ensure_async(cm.new_untitled(path=sub_dir, type='notebook')) - path = model['path'] + sub_dir = "/foo/" + _make_dir(cm, "foo") + model = await ensure_async(cm.new_untitled(path=sub_dir, type="notebook")) + path = model["path"] model = await ensure_async(cm.get(path)) # Change the name in the model for rename model = await ensure_async(cm.save(model, path)) assert isinstance(model, dict) - assert 'name' in model - assert 'path' in model - assert model['name'] == 'Untitled.ipynb' - assert model['path'] == 'foo/Untitled.ipynb' + assert "name" in model + assert "path" in model + assert model["name"] == "Untitled.ipynb" + assert model["path"] == "foo/Untitled.ipynb" async def test_delete(jp_contents_manager): @@ -525,7 +528,7 @@ async def test_rename(jp_contents_manager): assert isinstance(await ensure_async(cm.get("changed_path")), dict) # Ported tests on nested directory renaming 
from pgcontents - all_dirs = ['foo', 'bar', 'foo/bar', 'foo/bar/foo', 'foo/bar/foo/bar'] + all_dirs = ["foo", "bar", "foo/bar", "foo/bar/foo", "foo/bar/foo/bar"] unchanged_dirs = all_dirs[:2] changed_dirs = all_dirs[2:] @@ -565,30 +568,30 @@ async def test_rename(jp_contents_manager): async def test_delete_root(jp_contents_manager): cm = jp_contents_manager with pytest.raises(HTTPError) as e: - await ensure_async(cm.delete('')) + await ensure_async(cm.delete("")) assert expected_http_error(e, 400) async def test_copy(jp_contents_manager): cm = jp_contents_manager - parent = u'ÃĨ b' - name = u'nb √.ipynb' - path = u'{0}/{1}'.format(parent, name) + parent = u"ÃĨ b" + name = u"nb √.ipynb" + path = u"{0}/{1}".format(parent, name) _make_dir(cm, parent) orig = await ensure_async(cm.new(path=path)) # copy with unspecified name copy = await ensure_async(cm.copy(path)) - assert copy['name'] == orig['name'].replace('.ipynb', '-Copy1.ipynb') + assert copy["name"] == orig["name"].replace(".ipynb", "-Copy1.ipynb") # copy with specified name - copy2 = await ensure_async(cm.copy(path, u'ÃĨ b/copy 2.ipynb')) - assert copy2['name'] == u'copy 2.ipynb' - assert copy2['path'] == u'ÃĨ b/copy 2.ipynb' + copy2 = await ensure_async(cm.copy(path, u"ÃĨ b/copy 2.ipynb")) + assert copy2["name"] == u"copy 2.ipynb" + assert copy2["path"] == u"ÃĨ b/copy 2.ipynb" # copy with specified path - copy2 = await ensure_async(cm.copy(path, u'/')) - assert copy2['name'] == name - assert copy2['path'] == name + copy2 = await ensure_async(cm.copy(path, u"/")) + assert copy2["name"] == name + assert copy2["path"] == name async def test_mark_trusted_cells(jp_contents_manager): @@ -597,13 +600,13 @@ async def test_mark_trusted_cells(jp_contents_manager): cm.mark_trusted_cells(nb, path) for cell in nb.cells: - if cell.cell_type == 'code': + if cell.cell_type == "code": assert not cell.metadata.trusted await ensure_async(cm.trust_notebook(path)) - nb = (await ensure_async(cm.get(path)))['content'] + nb = (await ensure_async(cm.get(path)))["content"] for cell in nb.cells: - if cell.cell_type == 'code': + if cell.cell_type == "code": assert cell.metadata.trusted @@ -616,7 +619,7 @@ async def test_check_and_sign(jp_contents_manager): assert not cm.notary.check_signature(nb) await ensure_async(cm.trust_notebook(path)) - nb = (await ensure_async(cm.get(path)))['content'] + nb = (await ensure_async(cm.get(path)))["content"] cm.mark_trusted_cells(nb, path) cm.check_and_sign(nb, path) assert cm.notary.check_signature(nb) diff --git a/jupyter_server/tests/services/kernels/test_api.py b/jupyter_server/tests/services/kernels/test_api.py index bfad2cef65..948350fba4 100644 --- a/jupyter_server/tests/services/kernels/test_api.py +++ b/jupyter_server/tests/services/kernels/test_api.py @@ -1,98 +1,75 @@ -import time import json -import pytest +import time +import pytest import tornado - from jupyter_client.kernelspec import NATIVE_KERNEL_NAME -from jupyter_server.utils import url_path_join from ...utils import expected_http_error +from jupyter_server.utils import url_path_join @pytest.fixture(params=["MappingKernelManager", "AsyncMappingKernelManager"]) def jp_argv(request): - return ["--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager." + request.param] + return [ + "--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager." 
+ + request.param + ] async def test_no_kernels(jp_fetch): - r = await jp_fetch( - 'api', 'kernels', - method='GET' - ) + r = await jp_fetch("api", "kernels", method="GET") kernels = json.loads(r.body.decode()) assert kernels == [] async def test_default_kernels(jp_fetch, jp_base_url): - r = await jp_fetch( - 'api', 'kernels', - method='POST', - allow_nonstandard_methods=True - ) + r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True) kernel = json.loads(r.body.decode()) - assert r.headers['location'] == url_path_join(jp_base_url, '/api/kernels/', kernel['id']) + assert r.headers["location"] == url_path_join(jp_base_url, "/api/kernels/", kernel["id"]) assert r.code == 201 assert isinstance(kernel, dict) - report_uri = url_path_join(jp_base_url, '/api/security/csp-report') - expected_csp = '; '.join([ - "frame-ancestors 'self'", - 'report-uri ' + report_uri, - "default-src 'none'" - ]) - assert r.headers['Content-Security-Policy'] == expected_csp + report_uri = url_path_join(jp_base_url, "/api/security/csp-report") + expected_csp = "; ".join( + ["frame-ancestors 'self'", "report-uri " + report_uri, "default-src 'none'"] + ) + assert r.headers["Content-Security-Policy"] == expected_csp async def test_main_kernel_handler(jp_fetch, jp_base_url): # Start the first kernel r = await jp_fetch( - 'api', 'kernels', - method='POST', - body=json.dumps({ - 'name': NATIVE_KERNEL_NAME - }) + "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) ) kernel1 = json.loads(r.body.decode()) - assert r.headers['location'] == url_path_join(jp_base_url, '/api/kernels/', kernel1['id']) + assert r.headers["location"] == url_path_join(jp_base_url, "/api/kernels/", kernel1["id"]) assert r.code == 201 assert isinstance(kernel1, dict) - report_uri = url_path_join(jp_base_url, '/api/security/csp-report') - expected_csp = '; '.join([ - "frame-ancestors 'self'", - 'report-uri ' + report_uri, - "default-src 'none'" - ]) - assert r.headers['Content-Security-Policy'] == expected_csp + report_uri = url_path_join(jp_base_url, "/api/security/csp-report") + expected_csp = "; ".join( + ["frame-ancestors 'self'", "report-uri " + report_uri, "default-src 'none'"] + ) + assert r.headers["Content-Security-Policy"] == expected_csp # Check that the kernel is found in the kernel list - r = await jp_fetch( - 'api', 'kernels', - method='GET' - ) + r = await jp_fetch("api", "kernels", method="GET") kernel_list = json.loads(r.body.decode()) assert r.code == 200 assert isinstance(kernel_list, list) - assert kernel_list[0]['id'] == kernel1['id'] - assert kernel_list[0]['name'] == kernel1['name'] + assert kernel_list[0]["id"] == kernel1["id"] + assert kernel_list[0]["name"] == kernel1["name"] # Start a second kernel r = await jp_fetch( - 'api', 'kernels', - method='POST', - body=json.dumps({ - 'name': NATIVE_KERNEL_NAME - }) + "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) ) kernel2 = json.loads(r.body.decode()) assert isinstance(kernel2, dict) # Get kernel list again - r = await jp_fetch( - 'api', 'kernels', - method='GET' - ) + r = await jp_fetch("api", "kernels", method="GET") kernel_list = json.loads(r.body.decode()) assert r.code == 200 assert isinstance(kernel_list, list) @@ -100,30 +77,24 @@ async def test_main_kernel_handler(jp_fetch, jp_base_url): # Interrupt a kernel r = await jp_fetch( - 'api', 'kernels', kernel2['id'], 'interrupt', - method='POST', - allow_nonstandard_methods=True + "api", "kernels", kernel2["id"], "interrupt", method="POST", 
allow_nonstandard_methods=True ) assert r.code == 204 # Restart a kernel r = await jp_fetch( - 'api', 'kernels', kernel2['id'], 'restart', - method='POST', - allow_nonstandard_methods=True + "api", "kernels", kernel2["id"], "restart", method="POST", allow_nonstandard_methods=True ) restarted_kernel = json.loads(r.body.decode()) - assert restarted_kernel['id'] == kernel2['id'] - assert restarted_kernel['name'] == kernel2['name'] + assert restarted_kernel["id"] == kernel2["id"] + assert restarted_kernel["name"] == kernel2["name"] # Start a kernel with a path r = await jp_fetch( - 'api', 'kernels', - method='POST', - body=json.dumps({ - 'name': NATIVE_KERNEL_NAME, - 'path': '/foo' - }) + "api", + "kernels", + method="POST", + body=json.dumps({"name": NATIVE_KERNEL_NAME, "path": "/foo"}), ) kernel3 = json.loads(r.body.decode()) assert isinstance(kernel3, dict) @@ -132,108 +103,75 @@ async def test_main_kernel_handler(jp_fetch, jp_base_url): async def test_kernel_handler(jp_fetch): # Create a kernel r = await jp_fetch( - 'api', 'kernels', - method='POST', - body=json.dumps({ - 'name': NATIVE_KERNEL_NAME - }) - ) - kernel_id = json.loads(r.body.decode())['id'] - r = await jp_fetch( - 'api', 'kernels', kernel_id, - method='GET' + "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) ) + kernel_id = json.loads(r.body.decode())["id"] + r = await jp_fetch("api", "kernels", kernel_id, method="GET") kernel = json.loads(r.body.decode()) assert r.code == 200 assert isinstance(kernel, dict) - assert 'id' in kernel - assert kernel['id'] == kernel_id + assert "id" in kernel + assert kernel["id"] == kernel_id # Requests a bad kernel id. - bad_id = '111-111-111-111-111' + bad_id = "111-111-111-111-111" with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await jp_fetch( - 'api', 'kernels', bad_id, - method='GET' - ) + await jp_fetch("api", "kernels", bad_id, method="GET") assert expected_http_error(e, 404) # Delete kernel with id. r = await jp_fetch( - 'api', 'kernels', kernel_id, - method='DELETE', + "api", + "kernels", + kernel_id, + method="DELETE", ) assert r.code == 204 # Get list of kernels - r = await jp_fetch( - 'api', 'kernels', - method='GET' - ) + r = await jp_fetch("api", "kernels", method="GET") kernel_list = json.loads(r.body.decode()) assert kernel_list == [] # Request to delete a non-existent kernel id - bad_id = '111-111-111-111-111' + bad_id = "111-111-111-111-111" with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await jp_fetch( - 'api', 'kernels', bad_id, - method='DELETE' - ) - assert expected_http_error(e, 404, 'Kernel does not exist: ' + bad_id) + await jp_fetch("api", "kernels", bad_id, method="DELETE") + assert expected_http_error(e, 404, "Kernel does not exist: " + bad_id) async def test_connection(jp_fetch, jp_ws_fetch, jp_http_port, jp_auth_header): - print('hello') + print("hello") # Create kernel r = await jp_fetch( - 'api', 'kernels', - method='POST', - body=json.dumps({ - 'name': NATIVE_KERNEL_NAME - }) + "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) ) - kid = json.loads(r.body.decode())['id'] + kid = json.loads(r.body.decode())["id"] # Get kernel info - r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) - assert model['connections'] == 0 + assert model["connections"] == 0 # Open a websocket connection. 
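
The websocket accounting exercised here is eventually consistent: the server decrements `connections` shortly after the client closes, which is why the loop further down polls with a short sleep instead of asserting immediately. The same retry shape is handy in any client; a small generic helper, assuming nothing beyond the standard library (`get_connections` below is a hypothetical stand-in for the GET call above):

    import time

    def wait_until(predicate, attempts=10, delay=0.1):
        """Poll predicate() until it is true or attempts run out."""
        for _ in range(attempts):
            if predicate():
                return True
            time.sleep(delay)
        return False

    # e.g. after ws.close():
    # wait_until(lambda: get_connections(kid) == 0)
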
- ws = await jp_ws_fetch( - 'api', 'kernels', kid, 'channels' - ) + ws = await jp_ws_fetch("api", "kernels", kid, "channels") # Test that it was opened. - r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) - assert model['connections'] == 1 + assert model["connections"] == 1 # Close websocket ws.close() # give it some time to close on the other side: for i in range(10): - r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) - if model['connections'] > 0: + if model["connections"] > 0: time.sleep(0.1) else: break - r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) - assert model['connections'] == 0 - + assert model["connections"] == 0 diff --git a/jupyter_server/tests/services/kernels/test_config.py b/jupyter_server/tests/services/kernels/test_config.py index f2761fb68c..9b58a8c283 100644 --- a/jupyter_server/tests/services/kernels/test_config.py +++ b/jupyter_server/tests/services/kernels/test_config.py @@ -1,25 +1,23 @@ import pytest from traitlets.config import Config + from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager @pytest.fixture def jp_server_config(): - return Config({ - 'ServerApp': { - 'MappingKernelManager': { - 'allowed_message_types': ['kernel_info_request'] - } - } - }) + return Config( + {"ServerApp": {"MappingKernelManager": {"allowed_message_types": ["kernel_info_request"]}}} + ) def test_config(jp_serverapp): - assert jp_serverapp.kernel_manager.allowed_message_types == ['kernel_info_request'] + assert jp_serverapp.kernel_manager.allowed_message_types == ["kernel_info_request"] async def test_async_kernel_manager(jp_configurable_serverapp): - argv = ['--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager.AsyncMappingKernelManager'] + argv = [ + "--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager.AsyncMappingKernelManager" + ] app = jp_configurable_serverapp(argv=argv) assert isinstance(app.kernel_manager, AsyncMappingKernelManager) - diff --git a/jupyter_server/tests/services/kernels/test_cull.py b/jupyter_server/tests/services/kernels/test_cull.py index 158745cf83..029ea0ba57 100644 --- a/jupyter_server/tests/services/kernels/test_cull.py +++ b/jupyter_server/tests/services/kernels/test_cull.py @@ -1,53 +1,50 @@ import asyncio import json import platform + import pytest -from traitlets.config import Config from tornado.httpclient import HTTPClientError +from traitlets.config import Config @pytest.fixture(params=["MappingKernelManager", "AsyncMappingKernelManager"]) def jp_argv(request): - return ["--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager." + request.param] + return [ + "--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager." 
+ + request.param + ] -CULL_TIMEOUT = 10 if platform.python_implementation() == 'PyPy' else 5 +CULL_TIMEOUT = 10 if platform.python_implementation() == "PyPy" else 5 CULL_INTERVAL = 1 @pytest.fixture def jp_server_config(): - return Config({ - 'ServerApp': { - 'MappingKernelManager': { - 'cull_idle_timeout': CULL_TIMEOUT, - 'cull_interval': CULL_INTERVAL, - 'cull_connected': False + return Config( + { + "ServerApp": { + "MappingKernelManager": { + "cull_idle_timeout": CULL_TIMEOUT, + "cull_interval": CULL_INTERVAL, + "cull_connected": False, + } } } - }) + ) async def test_culling(jp_fetch, jp_ws_fetch): - r = await jp_fetch( - 'api', 'kernels', - method='POST', - allow_nonstandard_methods=True - ) + r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True) kernel = json.loads(r.body.decode()) - kid = kernel['id'] + kid = kernel["id"] # Open a websocket connection. - ws = await jp_ws_fetch( - 'api', 'kernels', kid, 'channels' - ) + ws = await jp_ws_fetch("api", "kernels", kid, "channels") - r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) - assert model['connections'] == 1 + assert model["connections"] == 1 culled = await get_cull_status(kid, jp_fetch) # connected, should not be culled assert not culled ws.close() @@ -58,12 +55,11 @@ async def test_culling(jp_fetch, jp_ws_fetch): async def get_cull_status(kid, jp_fetch): frequency = 0.5 culled = False - for _ in range(int((CULL_TIMEOUT + CULL_INTERVAL)/frequency)): # Timeout + Interval will ensure cull + for _ in range( + int((CULL_TIMEOUT + CULL_INTERVAL) / frequency) + ): # Timeout + Interval will ensure cull try: - r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") json.loads(r.body.decode()) except HTTPClientError as e: assert e.code == 404 diff --git a/jupyter_server/tests/services/kernelspecs/test_api.py b/jupyter_server/tests/services/kernelspecs/test_api.py index f5cc3a9a07..9004d3ea9e 100644 --- a/jupyter_server/tests/services/kernelspecs/test_api.py +++ b/jupyter_server/tests/services/kernelspecs/test_api.py @@ -1,102 +1,79 @@ -import pytest import json +import pytest import tornado - from jupyter_client.kernelspec import NATIVE_KERNEL_NAME -from ...utils import expected_http_error, some_resource + +from ...utils import expected_http_error +from ...utils import some_resource async def test_list_kernelspecs_bad(jp_fetch, jp_kernelspecs, jp_data_dir): - bad_kernel_dir = jp_data_dir.joinpath(jp_data_dir, 'kernels', 'bad') + bad_kernel_dir = jp_data_dir.joinpath(jp_data_dir, "kernels", "bad") bad_kernel_dir.mkdir(parents=True) - bad_kernel_json = bad_kernel_dir.joinpath('kernel.json') - bad_kernel_json.write_text('garbage') + bad_kernel_json = bad_kernel_dir.joinpath("kernel.json") + bad_kernel_json.write_text("garbage") - r = await jp_fetch( - 'api', 'kernelspecs', - method='GET' - ) + r = await jp_fetch("api", "kernelspecs", method="GET") model = json.loads(r.body.decode()) assert isinstance(model, dict) - assert model['default'] == NATIVE_KERNEL_NAME - specs = model['kernelspecs'] + assert model["default"] == NATIVE_KERNEL_NAME + specs = model["kernelspecs"] assert isinstance(specs, dict) assert len(specs) > 2 async def test_list_kernelspecs(jp_fetch, jp_kernelspecs): - r = await jp_fetch( - 'api', 'kernelspecs', - method='GET' - ) + r = await jp_fetch("api", "kernelspecs", method="GET") model = json.loads(r.body.decode()) 
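
The assertions below pin down the shape of the kernelspecs listing; spelled out as a literal, the response a client can rely on looks roughly like this (names and values illustrative, keyed to what these tests check rather than the full spec):

    model = {
        "default": "python3",            # NATIVE_KERNEL_NAME on a typical install
        "kernelspecs": {
            "python3": {
                "name": "python3",
                "resources": {},         # URLs of resource files, when present
                "spec": {
                    "display_name": "Python 3",
                    "language": "python",
                },
            },
        },
    }
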
assert isinstance(model, dict) - assert model['default'] == NATIVE_KERNEL_NAME - specs = model['kernelspecs'] + assert model["default"] == NATIVE_KERNEL_NAME + specs = model["kernelspecs"] assert isinstance(specs, dict) assert len(specs) > 2 def is_sample_kernelspec(s): - return s['name'] == 'sample' and s['spec']['display_name'] == 'Test kernel' + return s["name"] == "sample" and s["spec"]["display_name"] == "Test kernel" def is_default_kernelspec(s): - return s['name'] == NATIVE_KERNEL_NAME and s['spec']['display_name'].startswith("Python") + return s["name"] == NATIVE_KERNEL_NAME and s["spec"]["display_name"].startswith("Python") assert any(is_sample_kernelspec(s) for s in specs.values()), specs assert any(is_default_kernelspec(s) for s in specs.values()), specs async def test_get_kernelspecs(jp_fetch, jp_kernelspecs): - r = await jp_fetch( - 'api', 'kernelspecs', 'Sample', - method='GET' - ) + r = await jp_fetch("api", "kernelspecs", "Sample", method="GET") model = json.loads(r.body.decode()) - assert model['name'].lower() == 'sample' - assert isinstance(model['spec'], dict) - assert model['spec']['display_name'] == 'Test kernel' - assert isinstance(model['resources'], dict) + assert model["name"].lower() == "sample" + assert isinstance(model["spec"], dict) + assert model["spec"]["display_name"] == "Test kernel" + assert isinstance(model["resources"], dict) async def test_get_kernelspec_spaces(jp_fetch, jp_kernelspecs): - r = await jp_fetch( - 'api', 'kernelspecs', 'sample%202', - method='GET' - ) + r = await jp_fetch("api", "kernelspecs", "sample%202", method="GET") model = json.loads(r.body.decode()) - assert model['name'].lower() == 'sample 2' + assert model["name"].lower() == "sample 2" async def test_get_nonexistant_kernelspec(jp_fetch, jp_kernelspecs): with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await jp_fetch( - 'api', 'kernelspecs', 'nonexistant', - method='GET' - ) + await jp_fetch("api", "kernelspecs", "nonexistant", method="GET") assert expected_http_error(e, 404) async def test_get_kernel_resource_file(jp_fetch, jp_kernelspecs): - r = await jp_fetch( - 'kernelspecs', 'sAmple', 'resource.txt', - method='GET' - ) - res = r.body.decode('utf-8') + r = await jp_fetch("kernelspecs", "sAmple", "resource.txt", method="GET") + res = r.body.decode("utf-8") assert res == some_resource async def test_get_nonexistant_resource(jp_fetch, jp_kernelspecs): with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await jp_fetch( - 'kernelspecs', 'nonexistant', 'resource.txt', - method='GET' - ) + await jp_fetch("kernelspecs", "nonexistant", "resource.txt", method="GET") assert expected_http_error(e, 404) with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await jp_fetch( - 'kernelspecs', 'sample', 'nonexistant.txt', - method='GET' - ) + await jp_fetch("kernelspecs", "sample", "nonexistant.txt", method="GET") assert expected_http_error(e, 404) diff --git a/jupyter_server/tests/services/nbconvert/test_api.py b/jupyter_server/tests/services/nbconvert/test_api.py index 1e935655a4..5b1e0d6e3b 100644 --- a/jupyter_server/tests/services/nbconvert/test_api.py +++ b/jupyter_server/tests/services/nbconvert/test_api.py @@ -2,12 +2,9 @@ async def test_list_formats(jp_fetch): - r = await jp_fetch( - 'api', 'nbconvert', - method='GET' - ) + r = await jp_fetch("api", "nbconvert", method="GET") formats = json.loads(r.body.decode()) assert isinstance(formats, dict) - assert 'python' in formats - assert 'html' in formats - assert formats['python']['output_mimetype'] == 
'text/x-python' + assert "python" in formats + assert "html" in formats + assert formats["python"]["output_mimetype"] == "text/x-python" diff --git a/jupyter_server/tests/services/sessions/test_api.py b/jupyter_server/tests/services/sessions/test_api.py index c7c06e24c0..2882ab7b0e 100644 --- a/jupyter_server/tests/services/sessions/test_api.py +++ b/jupyter_server/tests/services/sessions/test_api.py @@ -1,14 +1,12 @@ -import time import json import shutil -import pytest +import time +import pytest import tornado - from jupyter_client.ioloop import AsyncIOLoopKernelManager - -from nbformat.v4 import new_notebook from nbformat import writes +from nbformat.v4 import new_notebook from traitlets import default from ...utils import expected_http_error @@ -20,8 +18,7 @@ class NewPortsKernelManager(AsyncIOLoopKernelManager): - - @default('cache_ports') + @default("cache_ports") def _default_cache_ports(self) -> bool: return False @@ -31,22 +28,28 @@ async def restart_kernel(self, now: bool = False, newports: bool = True, **kw) - class NewPortsMappingKernelManager(AsyncMappingKernelManager): - - @default('kernel_manager_class') + @default("kernel_manager_class") def _default_kernel_manager_class(self): self.log.debug("NewPortsMappingKernelManager in _default_kernel_manager_class!") return "jupyter_server.tests.services.sessions.test_api.NewPortsKernelManager" -@pytest.fixture(params=["MappingKernelManager", "AsyncMappingKernelManager", "NewPortsMappingKernelManager"]) +@pytest.fixture( + params=["MappingKernelManager", "AsyncMappingKernelManager", "NewPortsMappingKernelManager"] +) def jp_argv(request): if request.param == "NewPortsMappingKernelManager": - return ["--ServerApp.kernel_manager_class=jupyter_server.tests.services.sessions.test_api." + request.param] - return ["--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager." + request.param] + return [ + "--ServerApp.kernel_manager_class=jupyter_server.tests.services.sessions.test_api." + + request.param + ] + return [ + "--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager." 
+ + request.param + ] class SessionClient: - def __init__(self, fetch_callable): self.jp_fetch = fetch_callable @@ -55,90 +58,66 @@ async def _req(self, *args, method, body=None): body = json.dumps(body) r = await self.jp_fetch( - 'api', 'sessions', *args, - method=method, - body=body, - allow_nonstandard_methods=True + "api", "sessions", *args, method=method, body=body, allow_nonstandard_methods=True ) return r async def list(self): - return await self._req(method='GET') + return await self._req(method="GET") async def get(self, id): - return await self._req(id, method='GET') - - async def create( - self, - path, - type='notebook', - kernel_name='python', - kernel_id=None): - body = { - 'path': path, - 'type': type, - 'kernel': { - 'name': kernel_name, - 'id': kernel_id - } - } - return await self._req(method='POST', body=body) + return await self._req(id, method="GET") + + async def create(self, path, type="notebook", kernel_name="python", kernel_id=None): + body = {"path": path, "type": type, "kernel": {"name": kernel_name, "id": kernel_id}} + return await self._req(method="POST", body=body) def create_deprecated(self, path): - body = { - 'notebook': { - 'path': path - }, - 'kernel': { - 'name': 'python', - 'id': 'foo' - } - } - return self._req(method='POST', body=body) + body = {"notebook": {"path": path}, "kernel": {"name": "python", "id": "foo"}} + return self._req(method="POST", body=body) def modify_path(self, id, path): - body = {'path': path} - return self._req(id, method='PATCH', body=body) + body = {"path": path} + return self._req(id, method="PATCH", body=body) def modify_path_deprecated(self, id, path): - body = {'notebook': {'path': path}} - return self._req(id, method='PATCH', body=body) + body = {"notebook": {"path": path}} + return self._req(id, method="PATCH", body=body) def modify_type(self, id, type): - body = {'type': type} - return self._req(id, method='PATCH', body=body) + body = {"type": type} + return self._req(id, method="PATCH", body=body) def modify_kernel_name(self, id, kernel_name): - body = {'kernel': {'name': kernel_name}} - return self._req(id, method='PATCH', body=body) + body = {"kernel": {"name": kernel_name}} + return self._req(id, method="PATCH", body=body) def modify_kernel_id(self, id, kernel_id): # Also send a dummy name to show that id takes precedence. - body = {'kernel': {'id': kernel_id, 'name': 'foo'}} - return self._req(id, method='PATCH', body=body) + body = {"kernel": {"id": kernel_id, "name": "foo"}} + return self._req(id, method="PATCH", body=body) async def delete(self, id): - return await self._req(id, method='DELETE') + return await self._req(id, method="DELETE") async def cleanup(self): resp = await self.list() sessions = j(resp) for session in sessions: - await self.delete(session['id']) + await self.delete(session["id"]) time.sleep(0.1) - @pytest.fixture def session_client(jp_root_dir, jp_fetch): - subdir = jp_root_dir.joinpath('foo') + subdir = jp_root_dir.joinpath("foo") subdir.mkdir() # Write a notebook to subdir. 
nb = new_notebook() nb_str = writes(nb, version=4) - nbpath = subdir.joinpath('nb1.ipynb') - nbpath.write_text(nb_str, encoding='utf-8') + nbpath = subdir.joinpath("nb1.ipynb") + nbpath.write_text(nb_str, encoding="utf-8") # Yield a session client client = SessionClient(jp_fetch) @@ -149,27 +128,27 @@ def session_client(jp_root_dir, jp_fetch): def assert_kernel_equality(actual, expected): - """ Compares kernel models after taking into account that execution_states + """Compares kernel models after taking into account that execution_states may differ from 'starting' to 'idle'. The 'actual' argument is the current state (which may have an 'idle' status) while the 'expected' argument is the previous state (which may have a 'starting' status). """ - actual.pop('execution_state', None) - actual.pop('last_activity', None) - expected.pop('execution_state', None) - expected.pop('last_activity', None) + actual.pop("execution_state", None) + actual.pop("last_activity", None) + expected.pop("execution_state", None) + expected.pop("last_activity", None) assert actual == expected def assert_session_equality(actual, expected): - """ Compares session models. `actual` is the most current session, + """Compares session models. `actual` is the most current session, while `expected` is the target of the comparison. This order matters when comparing the kernel sub-models. """ - assert actual['id'] == expected['id'] - assert actual['path'] == expected['path'] - assert actual['type'] == expected['type'] - assert_kernel_equality(actual['kernel'], expected['kernel']) + assert actual["id"] == expected["id"] + assert actual["path"] == expected["path"] + assert actual["type"] == expected["type"] + assert_kernel_equality(actual["kernel"], expected["kernel"]) async def test_create(session_client, jp_base_url): @@ -179,13 +158,15 @@ async def test_create(session_client, jp_base_url): assert len(sessions) == 0 # Create a session. - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") assert resp.code == 201 new_session = j(resp) - assert 'id' in new_session - assert new_session['path'] == 'foo/nb1.ipynb' - assert new_session['type'] == 'notebook' - assert resp.headers['Location'] == url_path_join(jp_base_url, '/api/sessions/', new_session['id']) + assert "id" in new_session + assert new_session["path"] == "foo/nb1.ipynb" + assert new_session["type"] == "notebook" + assert resp.headers["Location"] == url_path_join( + jp_base_url, "/api/sessions/", new_session["id"] + ) # Check that the new session appears in list. resp = await session_client.list() @@ -194,7 +175,7 @@ async def test_create(session_client, jp_base_url): assert_session_equality(sessions[0], new_session) # Retrieve that session. 
- sid = new_session['id'] + sid = new_session["id"] resp = await session_client.get(sid) got = j(resp) assert_session_equality(got, new_session) @@ -204,47 +185,49 @@ async def test_create(session_client, jp_base_url): async def test_create_file_session(session_client): - resp = await session_client.create('foo/nb1.py', type='file') + resp = await session_client.create("foo/nb1.py", type="file") assert resp.code == 201 newsession = j(resp) - assert newsession['path'] == 'foo/nb1.py' - assert newsession['type'] == 'file' + assert newsession["path"] == "foo/nb1.py" + assert newsession["type"] == "file" await session_client.cleanup() async def test_create_console_session(session_client): - resp = await session_client.create('foo/abc123', type='console') + resp = await session_client.create("foo/abc123", type="console") assert resp.code == 201 newsession = j(resp) - assert newsession['path'] == 'foo/abc123' - assert newsession['type'] == 'console' + assert newsession["path"] == "foo/abc123" + assert newsession["type"] == "console" # Need to find a better solution to this. await session_client.cleanup() async def test_create_deprecated(session_client): - resp = await session_client.create_deprecated('foo/nb1.ipynb') + resp = await session_client.create_deprecated("foo/nb1.ipynb") assert resp.code == 201 newsession = j(resp) - assert newsession['path'] == 'foo/nb1.ipynb' - assert newsession['type'] == 'notebook' - assert newsession['notebook']['path'] == 'foo/nb1.ipynb' + assert newsession["path"] == "foo/nb1.ipynb" + assert newsession["type"] == "notebook" + assert newsession["notebook"]["path"] == "foo/nb1.ipynb" # Need to find a better solution to this. await session_client.cleanup() async def test_create_with_kernel_id(session_client, jp_fetch, jp_base_url): # create a new kernel - resp = await jp_fetch('api/kernels', method='POST', allow_nonstandard_methods=True) + resp = await jp_fetch("api/kernels", method="POST", allow_nonstandard_methods=True) kernel = j(resp) - resp = await session_client.create('foo/nb1.ipynb', kernel_id=kernel['id']) + resp = await session_client.create("foo/nb1.ipynb", kernel_id=kernel["id"]) assert resp.code == 201 new_session = j(resp) - assert 'id' in new_session - assert new_session['path'] == 'foo/nb1.ipynb' - assert new_session['kernel']['id'] == kernel['id'] - assert resp.headers['Location'] == url_path_join(jp_base_url, '/api/sessions/{0}'.format(new_session['id'])) + assert "id" in new_session + assert new_session["path"] == "foo/nb1.ipynb" + assert new_session["kernel"]["id"] == kernel["id"] + assert resp.headers["Location"] == url_path_join( + jp_base_url, "/api/sessions/{0}".format(new_session["id"]) + ) resp = await session_client.list() sessions = j(resp) @@ -252,17 +235,18 @@ async def test_create_with_kernel_id(session_client, jp_fetch, jp_base_url): assert_session_equality(sessions[0], new_session) # Retrieve it - sid = new_session['id'] + sid = new_session["id"] resp = await session_client.get(sid) got = j(resp) assert_session_equality(got, new_session) # Need to find a better solution to this. await session_client.cleanup() + async def test_delete(session_client): - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") newsession = j(resp) - sid = newsession['id'] + sid = newsession["id"] resp = await session_client.delete(sid) assert resp.code == 204 @@ -277,88 +261,92 @@ async def test_delete(session_client): # Need to find a better solution to this. 
await session_client.cleanup() + async def test_modify_path(session_client): - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") newsession = j(resp) - sid = newsession['id'] + sid = newsession["id"] - resp = await session_client.modify_path(sid, 'nb2.ipynb') + resp = await session_client.modify_path(sid, "nb2.ipynb") changed = j(resp) - assert changed['id'] == sid - assert changed['path'] == 'nb2.ipynb' + assert changed["id"] == sid + assert changed["path"] == "nb2.ipynb" # Need to find a better solution to this. await session_client.cleanup() + async def test_modify_path_deprecated(session_client): - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") newsession = j(resp) - sid = newsession['id'] + sid = newsession["id"] - resp = await session_client.modify_path_deprecated(sid, 'nb2.ipynb') + resp = await session_client.modify_path_deprecated(sid, "nb2.ipynb") changed = j(resp) - assert changed['id'] == sid - assert changed['notebook']['path'] == 'nb2.ipynb' + assert changed["id"] == sid + assert changed["notebook"]["path"] == "nb2.ipynb" # Need to find a better solution to this. await session_client.cleanup() + async def test_modify_type(session_client): - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") newsession = j(resp) - sid = newsession['id'] + sid = newsession["id"] - resp = await session_client.modify_type(sid, 'console') + resp = await session_client.modify_type(sid, "console") changed = j(resp) - assert changed['id'] == sid - assert changed['type'] == 'console' + assert changed["id"] == sid + assert changed["type"] == "console" # Need to find a better solution to this. await session_client.cleanup() + async def test_modify_kernel_name(session_client, jp_fetch): - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") before = j(resp) - sid = before['id'] + sid = before["id"] - resp = await session_client.modify_kernel_name(sid, before['kernel']['name']) + resp = await session_client.modify_kernel_name(sid, before["kernel"]["name"]) after = j(resp) - assert after['id'] == sid - assert after['path'] == before['path'] - assert after['type'] == before['type'] - assert after['kernel']['id'] != before['kernel']['id'] + assert after["id"] == sid + assert after["path"] == before["path"] + assert after["type"] == before["type"] + assert after["kernel"]["id"] != before["kernel"]["id"] # check kernel list, to be sure previous kernel was cleaned up - resp = await jp_fetch('api/kernels', method='GET') + resp = await jp_fetch("api/kernels", method="GET") kernel_list = j(resp) - after['kernel'].pop('last_activity') - [ k.pop('last_activity') for k in kernel_list ] - assert kernel_list == [after['kernel']] + after["kernel"].pop("last_activity") + [k.pop("last_activity") for k in kernel_list] + assert kernel_list == [after["kernel"]] # Need to find a better solution to this. 
await session_client.cleanup() async def test_modify_kernel_id(session_client, jp_fetch): - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") before = j(resp) - sid = before['id'] + sid = before["id"] # create a new kernel - resp = await jp_fetch('api/kernels', method='POST', allow_nonstandard_methods=True) + resp = await jp_fetch("api/kernels", method="POST", allow_nonstandard_methods=True) kernel = j(resp) # Attach our session to the existing kernel - resp = await session_client.modify_kernel_id(sid, kernel['id']) + resp = await session_client.modify_kernel_id(sid, kernel["id"]) after = j(resp) - assert after['id'] == sid - assert after['path'] == before['path'] - assert after['type'] == before['type'] - assert after['kernel']['id'] != before['kernel']['id'] - assert after['kernel']['id'] == kernel['id'] + assert after["id"] == sid + assert after["path"] == before["path"] + assert after["type"] == before["type"] + assert after["kernel"]["id"] != before["kernel"]["id"] + assert after["kernel"]["id"] == kernel["id"] # check kernel list, to be sure previous kernel was cleaned up - resp = await jp_fetch('api/kernels', method='GET') + resp = await jp_fetch("api/kernels", method="GET") kernel_list = j(resp) - kernel.pop('last_activity') - [ k.pop('last_activity') for k in kernel_list ] + kernel.pop("last_activity") + [k.pop("last_activity") for k in kernel_list] assert kernel_list == [kernel] # Need to find a better solution to this. @@ -368,78 +356,59 @@ async def test_modify_kernel_id(session_client, jp_fetch): async def test_restart_kernel(session_client, jp_base_url, jp_fetch, jp_ws_fetch): # Create a session. - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") assert resp.code == 201 new_session = j(resp) - assert 'id' in new_session - assert new_session['path'] == 'foo/nb1.ipynb' - assert new_session['type'] == 'notebook' - assert resp.headers['Location'] == url_path_join(jp_base_url, '/api/sessions/', new_session['id']) + assert "id" in new_session + assert new_session["path"] == "foo/nb1.ipynb" + assert new_session["type"] == "notebook" + assert resp.headers["Location"] == url_path_join( + jp_base_url, "/api/sessions/", new_session["id"] + ) - kid = new_session['kernel']['id'] + kid = new_session["kernel"]["id"] # Get kernel info - r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) - assert model['connections'] == 0 + assert model["connections"] == 0 # Open a websocket connection. - ws = await jp_ws_fetch( - 'api', 'kernels', kid, 'channels' - ) + ws = await jp_ws_fetch("api", "kernels", kid, "channels") # Test that it was opened. 
- r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) - assert model['connections'] == 1 + assert model["connections"] == 1 # Restart kernel r = await jp_fetch( - 'api', 'kernels', kid, 'restart', - method='POST', - allow_nonstandard_methods=True + "api", "kernels", kid, "restart", method="POST", allow_nonstandard_methods=True ) restarted_kernel = json.loads(r.body.decode()) - assert restarted_kernel['id'] == kid + assert restarted_kernel["id"] == kid # Close/open websocket ws.close() # give it some time to close on the other side: for i in range(10): - r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) - if model['connections'] > 0: + if model["connections"] > 0: time.sleep(0.1) else: break - r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) - assert model['connections'] == 0 + assert model["connections"] == 0 # Open a websocket connection. - await jp_ws_fetch( - 'api', 'kernels', kid, 'channels' - ) + await jp_ws_fetch("api", "kernels", kid, "channels") - r = await jp_fetch( - 'api', 'kernels', kid, - method='GET' - ) + r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) - assert model['connections'] == 1 + assert model["connections"] == 1 # Need to find a better solution to this. await session_client.cleanup() diff --git a/jupyter_server/tests/services/sessions/test_manager.py b/jupyter_server/tests/services/sessions/test_manager.py index ac114ae997..97af3175c4 100644 --- a/jupyter_server/tests/services/sessions/test_manager.py +++ b/jupyter_server/tests/services/sessions/test_manager.py @@ -1,15 +1,15 @@ import pytest - from tornado import web -from jupyter_server.services.sessions.sessionmanager import SessionManager -from jupyter_server.services.kernels.kernelmanager import MappingKernelManager +from jupyter_server._tz import isoformat +from jupyter_server._tz import utcnow from jupyter_server.services.contents.manager import ContentsManager -from jupyter_server._tz import utcnow, isoformat +from jupyter_server.services.kernels.kernelmanager import MappingKernelManager +from jupyter_server.services.sessions.sessionmanager import SessionManager class DummyKernel(object): - def __init__(self, kernel_name='python'): + def __init__(self, kernel_name="python"): self.kernel_name = kernel_name @@ -19,19 +19,20 @@ def __init__(self, kernel_name='python'): class DummyMKM(MappingKernelManager): """MappingKernelManager interface that doesn't start kernels, for testing""" + def __init__(self, *args, **kwargs): super(DummyMKM, self).__init__(*args, **kwargs) - self.id_letters = iter(u'ABCDEFGHIJK') + self.id_letters = iter(u"ABCDEFGHIJK") def _new_id(self): return next(self.id_letters) - async def start_kernel(self, kernel_id=None, path=None, kernel_name='python', **kwargs): + async def start_kernel(self, kernel_id=None, path=None, kernel_name="python", **kwargs): kernel_id = kernel_id or self._new_id() k = self._kernels[kernel_id] = DummyKernel(kernel_name=kernel_name) self._kernel_connections[kernel_id] = 0 k.last_activity = dummy_date - k.execution_state = 'idle' + k.execution_state = "idle" return kernel_id async def shutdown_kernel(self, kernel_id, now=False): @@ -40,15 +41,13 @@ async def shutdown_kernel(self, kernel_id, now=False): 
@pytest.fixture def session_manager(): - return SessionManager( - kernel_manager=DummyMKM(), - contents_manager=ContentsManager()) + return SessionManager(kernel_manager=DummyMKM(), contents_manager=ContentsManager()) async def create_multiple_sessions(session_manager, *kwargs_list): sessions = [] for kwargs in kwargs_list: - kwargs.setdefault('type', 'notebook') + kwargs.setdefault("type", "notebook") session = await session_manager.create_session(**kwargs) sessions.append(session) return sessions @@ -56,49 +55,43 @@ async def create_multiple_sessions(session_manager, *kwargs_list): async def test_get_session(session_manager): session = await session_manager.create_session( - path='/path/to/test.ipynb', - kernel_name='bar', - type='notebook' + path="/path/to/test.ipynb", kernel_name="bar", type="notebook" ) - session_id = session['id'] + session_id = session["id"] model = await session_manager.get_session(session_id=session_id) expected = { - 'id':session_id, - 'path': u'/path/to/test.ipynb', - 'notebook': {'path': u'/path/to/test.ipynb', 'name': None}, - 'type': 'notebook', - 'name': None, - 'kernel': { - 'id': 'A', - 'name': 'bar', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } + "id": session_id, + "path": u"/path/to/test.ipynb", + "notebook": {"path": u"/path/to/test.ipynb", "name": None}, + "type": "notebook", + "name": None, + "kernel": { + "id": "A", + "name": "bar", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, } assert model == expected async def test_bad_get_session(session_manager): session = await session_manager.create_session( - path='/path/to/test.ipynb', - kernel_name='foo', - type='notebook' + path="/path/to/test.ipynb", kernel_name="foo", type="notebook" ) with pytest.raises(TypeError): - await session_manager.get_session(bad_id=session['id']) + await session_manager.get_session(bad_id=session["id"]) async def test_get_session_dead_kernel(session_manager): session = await session_manager.create_session( - path='/path/to/1/test1.ipynb', - kernel_name='python', - type='notebook' + path="/path/to/1/test1.ipynb", kernel_name="python", type="notebook" ) # Kill the kernel - await session_manager.kernel_manager.shutdown_kernel(session['kernel']['id']) + await session_manager.kernel_manager.shutdown_kernel(session["kernel"]["id"]) with pytest.raises(KeyError): - await session_manager.get_session(session_id=session['id']) + await session_manager.get_session(session_id=session["id"]) # no session left listed = await session_manager.list_sessions() assert listed == [] @@ -107,50 +100,52 @@ async def test_get_session_dead_kernel(session_manager): async def test_list_session(session_manager): sessions = await create_multiple_sessions( session_manager, - dict(path='/path/to/1/test1.ipynb', kernel_name='python'), - dict(path='/path/to/2/test2.py', type='file', kernel_name='python'), - dict(path='/path/to/3', name='foo', type='console', kernel_name='python'), + dict(path="/path/to/1/test1.ipynb", kernel_name="python"), + dict(path="/path/to/2/test2.py", type="file", kernel_name="python"), + dict(path="/path/to/3", name="foo", type="console", kernel_name="python"), ) sessions = await session_manager.list_sessions() expected = [ { - 'id':sessions[0]['id'], - 'path': u'/path/to/1/test1.ipynb', - 'type': 'notebook', - 'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None}, - 'name': None, - 'kernel': { - 'id': 'A', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 
'execution_state': 'idle', - } - }, { - 'id':sessions[1]['id'], - 'path': u'/path/to/2/test2.py', - 'type': 'file', - 'name': None, - 'kernel': { - 'id': 'B', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - }, { - 'id':sessions[2]['id'], - 'path': u'/path/to/3', - 'type': 'console', - 'name': 'foo', - 'kernel': { - 'id': 'C', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - } + "id": sessions[0]["id"], + "path": u"/path/to/1/test1.ipynb", + "type": "notebook", + "notebook": {"path": u"/path/to/1/test1.ipynb", "name": None}, + "name": None, + "kernel": { + "id": "A", + "name": "python", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, + }, + { + "id": sessions[1]["id"], + "path": u"/path/to/2/test2.py", + "type": "file", + "name": None, + "kernel": { + "id": "B", + "name": "python", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, + }, + { + "id": sessions[2]["id"], + "path": u"/path/to/3", + "type": "console", + "name": "foo", + "kernel": { + "id": "C", + "name": "python", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, + }, ] assert sessions == expected @@ -158,26 +153,26 @@ async def test_list_session(session_manager): async def test_list_sessions_dead_kernel(session_manager): sessions = await create_multiple_sessions( session_manager, - dict(path='/path/to/1/test1.ipynb', kernel_name='python'), - dict(path='/path/to/2/test2.ipynb', kernel_name='python'), + dict(path="/path/to/1/test1.ipynb", kernel_name="python"), + dict(path="/path/to/2/test2.ipynb", kernel_name="python"), ) # kill one of the kernels - await session_manager.kernel_manager.shutdown_kernel(sessions[0]['kernel']['id']) + await session_manager.kernel_manager.shutdown_kernel(sessions[0]["kernel"]["id"]) listed = await session_manager.list_sessions() expected = [ { - 'id': sessions[1]['id'], - 'path': u'/path/to/2/test2.ipynb', - 'type': 'notebook', - 'name': None, - 'notebook': {'path': u'/path/to/2/test2.ipynb', 'name': None}, - 'kernel': { - 'id': 'B', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } + "id": sessions[1]["id"], + "path": u"/path/to/2/test2.ipynb", + "type": "notebook", + "name": None, + "notebook": {"path": u"/path/to/2/test2.ipynb", "name": None}, + "kernel": { + "id": "B", + "name": "python", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, } ] assert listed == expected @@ -185,25 +180,24 @@ async def test_list_sessions_dead_kernel(session_manager): async def test_update_session(session_manager): session = await session_manager.create_session( - path='/path/to/test.ipynb', - kernel_name='julia', - type='notebook' + path="/path/to/test.ipynb", kernel_name="julia", type="notebook" ) - session_id = session['id'] - await session_manager.update_session(session_id, path='/path/to/new_name.ipynb') + session_id = session["id"] + await session_manager.update_session(session_id, path="/path/to/new_name.ipynb") model = await session_manager.get_session(session_id=session_id) - expected = {'id':session_id, - 'path': u'/path/to/new_name.ipynb', - 'type': 'notebook', - 'name': None, - 'notebook': {'path': u'/path/to/new_name.ipynb', 'name': None}, - 'kernel': { - 'id': 'A', - 'name':'julia', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } + expected = { + "id": 
session_id,
+        "path": u"/path/to/new_name.ipynb",
+        "type": "notebook",
+        "name": None,
+        "notebook": {"path": u"/path/to/new_name.ipynb", "name": None},
+        "kernel": {
+            "id": "A",
+            "name": "julia",
+            "connections": 0,
+            "last_activity": dummy_date_s,
+            "execution_state": "idle",
+        },
     }
     assert model == expected
@@ -211,50 +205,52 @@ async def test_bad_update_session(session_manager):
     # try to update a session with a bad keyword ~ raise error
     session = await session_manager.create_session(
-        path='/path/to/test.ipynb',
-        kernel_name='ir',
-        type='notegbook'
+        path="/path/to/test.ipynb", kernel_name="ir", type="notebook"
     )
-    session_id = session['id']
+    session_id = session["id"]
     with pytest.raises(TypeError):
-        await session_manager.update_session(session_id=session_id, bad_kw='test.ipynb')  # Bad keyword
+        await session_manager.update_session(
+            session_id=session_id, bad_kw="test.ipynb"
+        )  # Bad keyword
 
 
 async def test_delete_session(session_manager):
     sessions = await create_multiple_sessions(
         session_manager,
-        dict(path='/path/to/1/test1.ipynb', kernel_name='python'),
-        dict(path='/path/to/2/test2.ipynb', kernel_name='python'),
-        dict(path='/path/to/3', name='foo', type='console', kernel_name='python'),
+        dict(path="/path/to/1/test1.ipynb", kernel_name="python"),
+        dict(path="/path/to/2/test2.ipynb", kernel_name="python"),
+        dict(path="/path/to/3", name="foo", type="console", kernel_name="python"),
     )
-    await session_manager.delete_session(sessions[1]['id'])
+    await session_manager.delete_session(sessions[1]["id"])
     new_sessions = await session_manager.list_sessions()
-    expected = [{
-        'id': sessions[0]['id'],
-        'path': u'/path/to/1/test1.ipynb',
-        'type': 'notebook',
-        'name': None,
-        'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None},
-        'kernel': {
-            'id': 'A',
-            'name':'python',
-            'connections': 0,
-            'last_activity': dummy_date_s,
-            'execution_state': 'idle',
-        }
-    }, {
-        'id': sessions[2]['id'],
-        'type': 'console',
-        'path': u'/path/to/3',
-        'name': 'foo',
-        'kernel': {
-            'id': 'C',
-            'name':'python',
-            'connections': 0,
-            'last_activity': dummy_date_s,
-            'execution_state': 'idle',
-        }
-    }
+    expected = [
+        {
+            "id": sessions[0]["id"],
+            "path": u"/path/to/1/test1.ipynb",
+            "type": "notebook",
+            "name": None,
+            "notebook": {"path": u"/path/to/1/test1.ipynb", "name": None},
+            "kernel": {
+                "id": "A",
+                "name": "python",
+                "connections": 0,
+                "last_activity": dummy_date_s,
+                "execution_state": "idle",
+            },
+        },
+        {
+            "id": sessions[2]["id"],
+            "type": "console",
+            "path": u"/path/to/3",
+            "name": "foo",
+            "kernel": {
+                "id": "C",
+                "name": "python",
+                "connections": 0,
+                "last_activity": dummy_date_s,
+                "execution_state": "idle",
+            },
+        },
     ]
     assert new_sessions == expected
@@ -262,12 +258,9 @@ async def test_bad_delete_session(session_manager):
     # try to delete a session that doesn't exist ~ raise error
     await session_manager.create_session(
-        path='/path/to/test.ipynb',
-        kernel_name='python',
-        type='notebook'
+        path="/path/to/test.ipynb", kernel_name="python", type="notebook"
     )
     with pytest.raises(TypeError):
-        await session_manager.delete_session(bad_kwarg='23424')  # Bad keyword
+        await session_manager.delete_session(bad_kwarg="23424")  # Bad keyword
     with pytest.raises(web.HTTPError):
-        await session_manager.delete_session(session_id='23424')  # nonexistent
-
+        await session_manager.delete_session(session_id="23424")  # nonexistent
diff --git a/jupyter_server/tests/test_config_manager.py b/jupyter_server/tests/test_config_manager.py
index 41280939bf..5329ca82c6 100644
--- a/jupyter_server/tests/test_config_manager.py
+++ b/jupyter_server/tests/test_config_manager.py
@@ -7,44 +7,44 @@ def test_json(tmp_path):
     tmpdir = str(tmp_path)
-    root_data = dict(a=1, x=2, nest={'a':1, 'x':2})
-    with open(os.path.join(tmpdir, 'foo.json'), 'w') as f:
+    root_data = dict(a=1, x=2, nest={"a": 1, "x": 2})
+    with open(os.path.join(tmpdir, "foo.json"), "w") as f:
         json.dump(root_data, f)
     # also make a foo.d/ directory with multiple json files
-    os.makedirs(os.path.join(tmpdir, 'foo.d'))
-    with open(os.path.join(tmpdir, 'foo.d', 'a.json'), 'w') as f:
-        json.dump(dict(a=2, b=1, nest={'a':2, 'b':1}), f)
-    with open(os.path.join(tmpdir, 'foo.d', 'b.json'), 'w') as f:
-        json.dump(dict(a=3, b=2, c=3, nest={'a':3, 'b':2, 'c':3}, only_in_b={'x':1}), f)
+    os.makedirs(os.path.join(tmpdir, "foo.d"))
+    with open(os.path.join(tmpdir, "foo.d", "a.json"), "w") as f:
+        json.dump(dict(a=2, b=1, nest={"a": 2, "b": 1}), f)
+    with open(os.path.join(tmpdir, "foo.d", "b.json"), "w") as f:
+        json.dump(dict(a=3, b=2, c=3, nest={"a": 3, "b": 2, "c": 3}, only_in_b={"x": 1}), f)
     manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=False)
-    data = manager.get('foo')
-    assert 'a' in data
-    assert 'x' in data
-    assert 'b' not in data
-    assert 'c' not in data
-    assert data['a'] == 1
-    assert 'x' in data['nest']
+    data = manager.get("foo")
+    assert "a" in data
+    assert "x" in data
+    assert "b" not in data
+    assert "c" not in data
+    assert data["a"] == 1
+    assert "x" in data["nest"]
     # if we write it out, it also shouldn't pick up the subdirectory
-    manager.set('foo', data)
-    data = manager.get('foo')
+    manager.set("foo", data)
+    data = manager.get("foo")
     assert data == root_data
 
     manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=True)
-    data = manager.get('foo')
-    assert 'a' in data
-    assert 'b' in data
-    assert 'c' in data
+    data = manager.get("foo")
+    assert "a" in data
+    assert "b" in data
+    assert "c" in data
     # files should be read in order foo.d/a.json foo.d/b.json foo.json
-    assert data['a'] == 1
-    assert data['b'] == 2
-    assert data['c'] == 3
-    assert data['nest']['a'] == 1
-    assert data['nest']['b'] == 2
-    assert data['nest']['c'] == 3
-    assert data['nest']['x'] == 2
+    assert data["a"] == 1
+    assert data["b"] == 2
+    assert data["c"] == 3
+    assert data["nest"]["a"] == 1
+    assert data["nest"]["b"] == 2
+    assert data["nest"]["c"] == 3
+    assert data["nest"]["x"] == 2
 
     # when writing out, we don't want foo.d/*.json data to be included in the root foo.json
-    manager.set('foo', data)
+    manager.set("foo", data)
     manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=False)
-    data = manager.get('foo')
+    data = manager.get("foo")
     assert data == root_data
diff --git a/jupyter_server/tests/test_files.py b/jupyter_server/tests/test_files.py
index a1c3872354..d8e65f4e8a 100644
--- a/jupyter_server/tests/test_files.py
+++ b/jupyter_server/tests/test_files.py
@@ -1,34 +1,32 @@
 import os
-import pytest
 from pathlib import Path
+
+import pytest
 import tornado
+from nbformat import writes
+from nbformat.v4 import new_code_cell
+from nbformat.v4 import new_markdown_cell
+from nbformat.v4 import new_notebook
+from nbformat.v4 import new_output
 
 from .utils import expected_http_error
-from nbformat import writes
-from nbformat.v4 import (new_notebook,
-    new_markdown_cell, new_code_cell,
-    new_output)
-
-@pytest.fixture(params=[
-    [False, ['å b']],
-    [False, ['å b', 'ç. d']],
-    [True, ['.å b']],
-    [True, ['å b', '.ç d']]
-])
+@pytest.fixture(
+    params=[[False, ["å b"]], [False, ["å b", "ç. d"]], [True, [".å b"]], [True, ["å b", ".ç d"]]]
+)
 def maybe_hidden(request):
     return request.param
 
 
 async def fetch_expect_200(jp_fetch, *path_parts):
-    r = await jp_fetch('files', *path_parts, method='GET')
-    assert (r.body.decode() == path_parts[-1]), (path_parts, r.body)
+    r = await jp_fetch("files", *path_parts, method="GET")
+    assert r.body.decode() == path_parts[-1], (path_parts, r.body)
 
 
 async def fetch_expect_404(jp_fetch, *path_parts):
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
-        await jp_fetch('files', *path_parts, method='GET')
+        await jp_fetch("files", *path_parts, method="GET")
     assert expected_http_error(e, 404), [path_parts, e]
@@ -37,7 +35,7 @@ async def test_hidden_files(jp_fetch, jp_serverapp, jp_root_dir, maybe_hidden):
     path = Path(jp_root_dir, *path_parts)
     path.mkdir(parents=True, exist_ok=True)
 
-    foos = ['foo', '.foo']
+    foos = ["foo", ".foo"]
     for foo in foos:
         (path / foo).write_text(foo)
@@ -45,8 +43,8 @@
         for foo in foos:
             await fetch_expect_404(jp_fetch, *path_parts, foo)
     else:
-        await fetch_expect_404(jp_fetch, *path_parts, '.foo')
-        await fetch_expect_200(jp_fetch, *path_parts, 'foo')
+        await fetch_expect_404(jp_fetch, *path_parts, ".foo")
+        await fetch_expect_200(jp_fetch, *path_parts, "foo")
 
     jp_serverapp.contents_manager.allow_hidden = True
@@ -58,72 +56,59 @@
 async def test_contents_manager(jp_fetch, jp_serverapp, jp_root_dir):
     """make sure ContentsManager returns right files (ipynb, bin, txt)."""
     nb = new_notebook(
         cells=[
-            new_markdown_cell(u'Created by test Âŗ'),
-            new_code_cell("print(2*6)", outputs=[
-                new_output("stream", text="12"),
-            ])
+            new_markdown_cell(u"Created by test Âŗ"),
+            new_code_cell(
+                "print(2*6)",
+                outputs=[
+                    new_output("stream", text="12"),
+                ],
+            ),
         ]
     )
-    jp_root_dir.joinpath('testnb.ipynb').write_text(writes(nb, version=4), encoding='utf-8')
-    jp_root_dir.joinpath('test.bin').write_bytes(b'\xff' + os.urandom(5))
-    jp_root_dir.joinpath('test.txt').write_text('foobar')
+    jp_root_dir.joinpath("testnb.ipynb").write_text(writes(nb, version=4), encoding="utf-8")
+    jp_root_dir.joinpath("test.bin").write_bytes(b"\xff" + os.urandom(5))
+    jp_root_dir.joinpath("test.txt").write_text("foobar")
 
-    r = await jp_fetch(
-        'files/testnb.ipynb',
-        method='GET'
-    )
+    r = await jp_fetch("files/testnb.ipynb", method="GET")
     assert r.code == 200
-    assert 'print(2*6)' in r.body.decode('utf-8')
+    assert "print(2*6)" in r.body.decode("utf-8")
 
-    r = await jp_fetch(
-        'files/test.bin',
-        method='GET'
-    )
+    r = await jp_fetch("files/test.bin", method="GET")
     assert r.code == 200
-    assert r.headers['content-type'] == 'application/octet-stream'
-    assert r.body[:1] == b'\xff'
+    assert r.headers["content-type"] == "application/octet-stream"
+    assert r.body[:1] == b"\xff"
     assert len(r.body) == 6
 
-    r = await jp_fetch(
-        'files/test.txt',
-        method='GET'
-    )
+    r = await jp_fetch("files/test.txt", method="GET")
     assert r.code == 200
-    assert r.headers['content-type'] == 'text/plain; charset=UTF-8'
-    assert r.body.decode() == 'foobar'
+    assert r.headers["content-type"] == "text/plain; charset=UTF-8"
+    assert r.body.decode() == "foobar"
 
 
 async def test_download(jp_fetch, jp_serverapp, jp_root_dir):
-    text = 'hello'
-    jp_root_dir.joinpath('test.txt').write_text(text)
+    text = "hello"
+    jp_root_dir.joinpath("test.txt").write_text(text)
 
-    r = await
jp_fetch( - 'files', 'test.txt', - method='GET' - ) - disposition = r.headers.get('Content-Disposition', '') - assert 'attachment' not in disposition + r = await jp_fetch("files", "test.txt", method="GET") + disposition = r.headers.get("Content-Disposition", "") + assert "attachment" not in disposition - r = await jp_fetch( - 'files', 'test.txt', - method='GET', - params={'download': True} - ) - disposition = r.headers.get('Content-Disposition', '') - assert 'attachment' in disposition + r = await jp_fetch("files", "test.txt", method="GET", params={"download": True}) + disposition = r.headers.get("Content-Disposition", "") + assert "attachment" in disposition assert "filename*=utf-8''test.txt" in disposition async def test_old_files_redirect(jp_fetch, jp_serverapp, jp_root_dir): """pre-2.0 'files/' prefixed links are properly redirected""" - jp_root_dir.joinpath('files').mkdir(parents=True, exist_ok=True) - jp_root_dir.joinpath('sub', 'files').mkdir(parents=True, exist_ok=True) - - for prefix in ('', 'sub'): - jp_root_dir.joinpath(prefix, 'files', 'f1.txt').write_text(prefix + '/files/f1') - jp_root_dir.joinpath(prefix, 'files', 'f2.txt').write_text(prefix + '/files/f2') - jp_root_dir.joinpath(prefix, 'f2.txt').write_text(prefix + '/f2') - jp_root_dir.joinpath(prefix, 'f3.txt').write_text(prefix + '/f3') + jp_root_dir.joinpath("files").mkdir(parents=True, exist_ok=True) + jp_root_dir.joinpath("sub", "files").mkdir(parents=True, exist_ok=True) + + for prefix in ("", "sub"): + jp_root_dir.joinpath(prefix, "files", "f1.txt").write_text(prefix + "/files/f1") + jp_root_dir.joinpath(prefix, "files", "f2.txt").write_text(prefix + "/files/f2") + jp_root_dir.joinpath(prefix, "f2.txt").write_text(prefix + "/f2") + jp_root_dir.joinpath(prefix, "f3.txt").write_text(prefix + "/f3") # These depend on the tree handlers # diff --git a/jupyter_server/tests/test_gateway.py b/jupyter_server/tests/test_gateway.py index 86ab2908db..1949dc9b6b 100644 --- a/jupyter_server/tests/test_gateway.py +++ b/jupyter_server/tests/test_gateway.py @@ -1,29 +1,46 @@ """Test GatewayClient""" import json import os -import pytest -import tornado import uuid from datetime import datetime +from io import StringIO +from unittest.mock import patch + +import pytest +import tornado +from tornado.httpclient import HTTPRequest +from tornado.httpclient import HTTPResponse from tornado.web import HTTPError -from tornado.httpclient import HTTPRequest, HTTPResponse -from jupyter_server.gateway.managers import GatewayClient -from jupyter_server.utils import ensure_async -from unittest.mock import patch -from io import StringIO from .utils import expected_http_error +from jupyter_server.gateway.managers import GatewayClient +from jupyter_server.utils import ensure_async def generate_kernelspec(name): - argv_stanza = ['python', '-m', 'ipykernel_launcher', '-f', '{connection_file}'] - spec_stanza = {'spec': {'argv': argv_stanza, 'env': {}, 'display_name': name, 'language': 'python', 'interrupt_mode': 'signal', 'metadata': {}}} - kernelspec_stanza = {'name': name, 'spec': spec_stanza, 'resources': {}} + argv_stanza = ["python", "-m", "ipykernel_launcher", "-f", "{connection_file}"] + spec_stanza = { + "spec": { + "argv": argv_stanza, + "env": {}, + "display_name": name, + "language": "python", + "interrupt_mode": "signal", + "metadata": {}, + } + } + kernelspec_stanza = {"name": name, "spec": spec_stanza, "resources": {}} return kernelspec_stanza # We'll mock up two kernelspecs - kspec_foo and kspec_bar -kernelspecs = {'default': 'kspec_foo', 
'kernelspecs': {'kspec_foo': generate_kernelspec('kspec_foo'), 'kspec_bar': generate_kernelspec('kspec_bar')}} +kernelspecs = { + "default": "kspec_foo", + "kernelspecs": { + "kspec_foo": generate_kernelspec("kspec_foo"), + "kspec_bar": generate_kernelspec("kspec_bar"), + }, +} # maintain a dictionary of expected running kernels. Key = kernel_id, Value = model. @@ -32,53 +49,59 @@ def generate_kernelspec(name): def generate_model(name): """Generate a mocked kernel model. Caller is responsible for adding model to running_kernels dictionary.""" - dt = datetime.utcnow().isoformat() + 'Z' + dt = datetime.utcnow().isoformat() + "Z" kernel_id = str(uuid.uuid4()) - model = {'id': kernel_id, 'name': name, 'last_activity': str(dt), 'execution_state': 'idle', 'connections': 1} + model = { + "id": kernel_id, + "name": name, + "last_activity": str(dt), + "execution_state": "idle", + "connections": 1, + } return model async def mock_gateway_request(url, **kwargs): - method = 'GET' - if kwargs['method']: - method = kwargs['method'] + method = "GET" + if kwargs["method"]: + method = kwargs["method"] request = HTTPRequest(url=url, **kwargs) endpoint = str(url) # Fetch all kernelspecs - if endpoint.endswith('/api/kernelspecs') and method == 'GET': + if endpoint.endswith("/api/kernelspecs") and method == "GET": response_buf = StringIO(json.dumps(kernelspecs)) response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response # Fetch named kernelspec - if endpoint.rfind('/api/kernelspecs/') >= 0 and method == 'GET': - requested_kernelspec = endpoint.rpartition('/')[2] - kspecs = kernelspecs.get('kernelspecs') + if endpoint.rfind("/api/kernelspecs/") >= 0 and method == "GET": + requested_kernelspec = endpoint.rpartition("/")[2] + kspecs = kernelspecs.get("kernelspecs") if requested_kernelspec in kspecs: response_buf = StringIO(json.dumps(kspecs.get(requested_kernelspec))) response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response else: - raise HTTPError(404, message='Kernelspec does not exist: %s' % requested_kernelspec) + raise HTTPError(404, message="Kernelspec does not exist: %s" % requested_kernelspec) # Create kernel - if endpoint.endswith('/api/kernels') and method == 'POST': - json_body = json.loads(kwargs['body']) - name = json_body.get('name') - env = json_body.get('env') - kspec_name = env.get('KERNEL_KSPEC_NAME') - assert name == kspec_name # Ensure that KERNEL_ env values get propagated + if endpoint.endswith("/api/kernels") and method == "POST": + json_body = json.loads(kwargs["body"]) + name = json_body.get("name") + env = json_body.get("env") + kspec_name = env.get("KERNEL_KSPEC_NAME") + assert name == kspec_name # Ensure that KERNEL_ env values get propagated model = generate_model(name) - running_kernels[model.get('id')] = model # Register model as a running kernel + running_kernels[model.get("id")] = model # Register model as a running kernel response_buf = StringIO(json.dumps(model)) response = await ensure_async(HTTPResponse(request, 201, buffer=response_buf)) return response # Fetch list of running kernels - if endpoint.endswith('/api/kernels') and method == 'GET': + if endpoint.endswith("/api/kernels") and method == "GET": kernels = [] for kernel_id in running_kernels.keys(): model = running_kernels.get(kernel_id) @@ -88,54 +111,48 @@ async def mock_gateway_request(url, **kwargs): return response # Interrupt or restart existing kernel - if endpoint.rfind('/api/kernels/') >= 0 and method == 'POST': - requested_kernel_id, sep, 
action = endpoint.rpartition('/api/kernels/')[2].rpartition('/') + if endpoint.rfind("/api/kernels/") >= 0 and method == "POST": + requested_kernel_id, sep, action = endpoint.rpartition("/api/kernels/")[2].rpartition("/") - if action == 'interrupt': + if action == "interrupt": if requested_kernel_id in running_kernels: response = await ensure_async(HTTPResponse(request, 204)) return response else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) - elif action == 'restart': + raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id) + elif action == "restart": if requested_kernel_id in running_kernels: - response_buf = StringIO( - json.dumps(running_kernels.get(requested_kernel_id)) - ) - response = await ensure_async( - HTTPResponse(request, 204, buffer=response_buf) - ) + response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id))) + response = await ensure_async(HTTPResponse(request, 204, buffer=response_buf)) return response else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) + raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id) else: - raise HTTPError(404, message='Bad action detected: %s' % action) + raise HTTPError(404, message="Bad action detected: %s" % action) # Shutdown existing kernel - if endpoint.rfind('/api/kernels/') >= 0 and method == 'DELETE': - requested_kernel_id = endpoint.rpartition('/')[2] - running_kernels.pop(requested_kernel_id) # Simulate shutdown by removing kernel from running set + if endpoint.rfind("/api/kernels/") >= 0 and method == "DELETE": + requested_kernel_id = endpoint.rpartition("/")[2] + running_kernels.pop( + requested_kernel_id + ) # Simulate shutdown by removing kernel from running set response = await ensure_async(HTTPResponse(request, 204)) return response # Fetch existing kernel - if endpoint.rfind('/api/kernels/') >= 0 and method == 'GET': - requested_kernel_id = endpoint.rpartition('/')[2] + if endpoint.rfind("/api/kernels/") >= 0 and method == "GET": + requested_kernel_id = endpoint.rpartition("/")[2] if requested_kernel_id in running_kernels: - response_buf = StringIO( - json.dumps(running_kernels.get(requested_kernel_id)) - ) - response = await ensure_async( - HTTPResponse(request, 200, buffer=response_buf) - ) + response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id))) + response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) + raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id) -mocked_gateway = patch('jupyter_server.gateway.managers.gateway_request', mock_gateway_request) -mock_gateway_url = 'http://mock-gateway-server:8889' -mock_http_user = 'alice' +mocked_gateway = patch("jupyter_server.gateway.managers.gateway_request", mock_gateway_request) +mock_gateway_url = "http://mock-gateway-server:8889" +mock_http_user = "alice" @pytest.fixture @@ -143,10 +160,10 @@ def init_gateway(monkeypatch): """Initializes the server for use as a gateway client. """ # Clear the singleton first since previous tests may not have used a gateway. 
GatewayClient.clear_instance() - monkeypatch.setenv('JUPYTER_GATEWAY_URL', mock_gateway_url) - monkeypatch.setenv('JUPYTER_GATEWAY_HTTP_USER', mock_http_user) - monkeypatch.setenv('JUPYTER_GATEWAY_REQUEST_TIMEOUT', '44.4') - monkeypatch.setenv('JUPYTER_GATEWAY_CONNECT_TIMEOUT', '44.4') + monkeypatch.setenv("JUPYTER_GATEWAY_URL", mock_gateway_url) + monkeypatch.setenv("JUPYTER_GATEWAY_HTTP_USER", mock_http_user) + monkeypatch.setenv("JUPYTER_GATEWAY_REQUEST_TIMEOUT", "44.4") + monkeypatch.setenv("JUPYTER_GATEWAY_CONNECT_TIMEOUT", "44.4") yield GatewayClient.clear_instance() @@ -155,19 +172,23 @@ async def test_gateway_env_options(init_gateway, jp_serverapp): assert jp_serverapp.gateway_config.gateway_enabled is True assert jp_serverapp.gateway_config.url == mock_gateway_url assert jp_serverapp.gateway_config.http_user == mock_http_user - assert jp_serverapp.gateway_config.connect_timeout == jp_serverapp.gateway_config.request_timeout + assert ( + jp_serverapp.gateway_config.connect_timeout == jp_serverapp.gateway_config.request_timeout + ) assert jp_serverapp.gateway_config.connect_timeout == 44.4 GatewayClient.instance().init_static_args() - assert GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == int(jp_serverapp.gateway_config.request_timeout) + assert GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == int( + jp_serverapp.gateway_config.request_timeout + ) async def test_gateway_cli_options(jp_configurable_serverapp): argv = [ - '--gateway-url=' + mock_gateway_url, - '--GatewayClient.http_user=' + mock_http_user, - '--GatewayClient.connect_timeout=44.4', - '--GatewayClient.request_timeout=96.0' + "--gateway-url=" + mock_gateway_url, + "--GatewayClient.http_user=" + mock_http_user, + "--GatewayClient.connect_timeout=44.4", + "--GatewayClient.request_timeout=96.0", ] GatewayClient.clear_instance() @@ -179,47 +200,40 @@ async def test_gateway_cli_options(jp_configurable_serverapp): assert app.gateway_config.connect_timeout == 44.4 assert app.gateway_config.request_timeout == 96.0 GatewayClient.instance().init_static_args() - assert GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == 96 # Ensure KLT gets set from request-timeout + assert ( + GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == 96 + ) # Ensure KLT gets set from request-timeout GatewayClient.clear_instance() async def test_gateway_class_mappings(init_gateway, jp_serverapp): # Ensure appropriate class mappings are in place. - assert jp_serverapp.kernel_manager_class.__name__ == 'GatewayMappingKernelManager' - assert jp_serverapp.session_manager_class.__name__ == 'GatewaySessionManager' - assert jp_serverapp.kernel_spec_manager_class.__name__ == 'GatewayKernelSpecManager' + assert jp_serverapp.kernel_manager_class.__name__ == "GatewayMappingKernelManager" + assert jp_serverapp.session_manager_class.__name__ == "GatewaySessionManager" + assert jp_serverapp.kernel_spec_manager_class.__name__ == "GatewayKernelSpecManager" async def test_gateway_get_kernelspecs(init_gateway, jp_fetch): # Validate that kernelspecs come from gateway. 
with mocked_gateway: - r = await jp_fetch( - 'api', 'kernelspecs', - method='GET' - ) + r = await jp_fetch("api", "kernelspecs", method="GET") assert r.code == 200 - content = json.loads(r.body.decode('utf-8')) - kspecs = content.get('kernelspecs') + content = json.loads(r.body.decode("utf-8")) + kspecs = content.get("kernelspecs") assert len(kspecs) == 2 - assert kspecs.get('kspec_bar').get('name') == 'kspec_bar' + assert kspecs.get("kspec_bar").get("name") == "kspec_bar" async def test_gateway_get_named_kernelspec(init_gateway, jp_fetch): # Validate that a specific kernelspec can be retrieved from gateway (and an invalid spec can't) with mocked_gateway: - r = await jp_fetch( - 'api', 'kernelspecs', 'kspec_foo', - method='GET' - ) + r = await jp_fetch("api", "kernelspecs", "kspec_foo", method="GET") assert r.code == 200 - kspec_foo = json.loads(r.body.decode('utf-8')) - assert kspec_foo.get('name') == 'kspec_foo' + kspec_foo = json.loads(r.body.decode("utf-8")) + assert kspec_foo.get("name") == "kspec_foo" with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await jp_fetch( - 'api', 'kernelspecs', 'no_such_spec', - method='GET' - ) + await jp_fetch("api", "kernelspecs", "no_such_spec", method="GET") assert expected_http_error(e, 404) @@ -227,7 +241,7 @@ async def test_gateway_session_lifecycle(init_gateway, jp_root_dir, jp_fetch): # Validate session lifecycle functions; create and delete. # create - session_id, kernel_id = await create_session(jp_root_dir, jp_fetch, 'kspec_foo') + session_id, kernel_id = await create_session(jp_root_dir, jp_fetch, "kspec_foo") # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True @@ -253,7 +267,7 @@ async def test_gateway_kernel_lifecycle(init_gateway, jp_fetch): # Validate kernel lifecycle functions; create, interrupt, restart and delete. # create - kernel_id = await create_kernel(jp_fetch, 'kspec_bar') + kernel_id = await create_kernel(jp_fetch, "kspec_bar") # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True @@ -280,135 +294,108 @@ async def test_gateway_kernel_lifecycle(init_gateway, jp_fetch): # async def create_session(root_dir, jp_fetch, kernel_name): """Creates a session for a kernel. The session is created against the server - which then uses the gateway for kernel management. + which then uses the gateway for kernel management. """ with mocked_gateway: - nb_path = root_dir / 'testgw.ipynb' - body = json.dumps({'path': str(nb_path), - 'type': 'notebook', - 'kernel': {'name': kernel_name}}) + nb_path = root_dir / "testgw.ipynb" + body = json.dumps( + {"path": str(nb_path), "type": "notebook", "kernel": {"name": kernel_name}} + ) # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method - os.environ['KERNEL_KSPEC_NAME'] = kernel_name + os.environ["KERNEL_KSPEC_NAME"] = kernel_name # Create the kernel... (also tests get_kernel) - r = await jp_fetch( - 'api', 'sessions', - method='POST', - body=body - ) + r = await jp_fetch("api", "sessions", method="POST", body=body) assert r.code == 201 - model = json.loads(r.body.decode('utf-8')) - assert model.get('path') == str(nb_path) - kernel_id = model.get('kernel').get('id') + model = json.loads(r.body.decode("utf-8")) + assert model.get("path") == str(nb_path) + kernel_id = model.get("kernel").get("id") # ensure its in the running_kernels and name matches. 
         running_kernel = running_kernels.get(kernel_id)
-        assert kernel_id == running_kernel.get('id')
-        assert model.get('kernel').get('name') == running_kernel.get('name')
-        session_id = model.get('id')
+        assert kernel_id == running_kernel.get("id")
+        assert model.get("kernel").get("name") == running_kernel.get("name")
+        session_id = model.get("id")
 
         # restore env
-        os.environ.pop('KERNEL_KSPEC_NAME')
+        os.environ.pop("KERNEL_KSPEC_NAME")
         return session_id, kernel_id
 
 
 async def delete_session(jp_fetch, session_id):
-    """Deletes a session corresponding to the given session id.
-    """
+    """Deletes a session corresponding to the given session id."""
     with mocked_gateway:
         # Delete the session (and kernel)
-        r = await jp_fetch(
-            'api', 'sessions', session_id,
-            method='DELETE'
-        )
+        r = await jp_fetch("api", "sessions", session_id, method="DELETE")
         assert r.code == 204
-        assert r.reason == 'No Content'
+        assert r.reason == "No Content"
 
 
 async def is_kernel_running(jp_fetch, kernel_id):
-    """Issues request to get the set of running kernels
-    """
+    """Issues request to get the set of running kernels"""
    with mocked_gateway:
         # Get list of running kernels
-        r = await jp_fetch(
-            'api', 'kernels',
-            method='GET'
-        )
+        r = await jp_fetch("api", "kernels", method="GET")
         assert r.code == 200
-        kernels = json.loads(r.body.decode('utf-8'))
+        kernels = json.loads(r.body.decode("utf-8"))
         assert len(kernels) == len(running_kernels)
         for model in kernels:
-            if model.get('id') == kernel_id:
+            if model.get("id") == kernel_id:
                 return True
         return False
 
 
 async def create_kernel(jp_fetch, kernel_name):
-    """Issues request to retart the given kernel
-    """
+    """Issues request to create a kernel"""
     with mocked_gateway:
-        body = json.dumps({'name': kernel_name})
+        body = json.dumps({"name": kernel_name})
 
         # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method
-        os.environ['KERNEL_KSPEC_NAME'] = kernel_name
+        os.environ["KERNEL_KSPEC_NAME"] = kernel_name
 
-        r = await jp_fetch(
-            'api', 'kernels',
-            method='POST',
-            body=body
-        )
+        r = await jp_fetch("api", "kernels", method="POST", body=body)
         assert r.code == 201
-        model = json.loads(r.body.decode('utf-8'))
-        kernel_id = model.get('id')
+        model = json.loads(r.body.decode("utf-8"))
+        kernel_id = model.get("id")
 
         # ensure it's in the running_kernels and name matches.
         running_kernel = running_kernels.get(kernel_id)
-        assert kernel_id == running_kernel.get('id')
-        assert model.get('name') == kernel_name
+        assert kernel_id == running_kernel.get("id")
+        assert model.get("name") == kernel_name
 
         # restore env
-        os.environ.pop('KERNEL_KSPEC_NAME')
+        os.environ.pop("KERNEL_KSPEC_NAME")
         return kernel_id
 
 
 async def interrupt_kernel(jp_fetch, kernel_id):
-    """Issues request to interrupt the given kernel
-    """
+    """Issues request to interrupt the given kernel"""
     with mocked_gateway:
         r = await jp_fetch(
-            'api', 'kernels', kernel_id, 'interrupt',
-            method='POST',
-            allow_nonstandard_methods=True
+            "api", "kernels", kernel_id, "interrupt", method="POST", allow_nonstandard_methods=True
         )
         assert r.code == 204
-        assert r.reason == 'No Content'
+        assert r.reason == "No Content"
 
 
 async def restart_kernel(jp_fetch, kernel_id):
-    """Issues request to retart the given kernel
-    """
+    """Issues request to restart the given kernel"""
     with mocked_gateway:
         r = await jp_fetch(
-            'api', 'kernels', kernel_id, 'restart',
-            method='POST',
-            allow_nonstandard_methods=True
+            "api", "kernels", kernel_id, "restart", method="POST", allow_nonstandard_methods=True
         )
         assert r.code == 200
-        model = json.loads(r.body.decode('utf-8'))
-        restarted_kernel_id = model.get('id')
+        model = json.loads(r.body.decode("utf-8"))
+        restarted_kernel_id = model.get("id")
 
         # ensure it's in the running_kernels and name matches.
         running_kernel = running_kernels.get(restarted_kernel_id)
-        assert restarted_kernel_id == running_kernel.get('id')
-        assert model.get('name') == running_kernel.get('name')
+        assert restarted_kernel_id == running_kernel.get("id")
+        assert model.get("name") == running_kernel.get("name")
 
 
 async def delete_kernel(jp_fetch, kernel_id):
-    """Deletes kernel corresponding to the given kernel id.
- """ + """Deletes kernel corresponding to the given kernel id.""" with mocked_gateway: # Delete the session (and kernel) - r = await jp_fetch( - 'api', 'kernels', kernel_id, - method='DELETE' - ) + r = await jp_fetch("api", "kernels", kernel_id, method="DELETE") assert r.code == 204 - assert r.reason == 'No Content' + assert r.reason == "No Content" diff --git a/jupyter_server/tests/test_paths.py b/jupyter_server/tests/test_paths.py index 60c2951a13..d185234389 100644 --- a/jupyter_server/tests/test_paths.py +++ b/jupyter_server/tests/test_paths.py @@ -1,54 +1,60 @@ import re + import pytest import tornado + from jupyter_server.base.handlers import path_regex from jupyter_server.utils import url_path_join # build regexps that tornado uses: -path_pat = re.compile('^' + '/x%s' % path_regex + '$') +path_pat = re.compile("^" + "/x%s" % path_regex + "$") + def test_path_regex(): for path in ( - '/x', - '/x/', - '/x/foo', - '/x/foo.ipynb', - '/x/foo/bar', - '/x/foo/bar.txt', + "/x", + "/x/", + "/x/foo", + "/x/foo.ipynb", + "/x/foo/bar", + "/x/foo/bar.txt", ): assert re.match(path_pat, path) + def test_path_regex_bad(): for path in ( - '/xfoo', - '/xfoo/', - '/xfoo/bar', - '/xfoo/bar/', - '/x/foo/bar/', - '/x//foo', - '/y', - '/y/x/foo', + "/xfoo", + "/xfoo/", + "/xfoo/bar", + "/xfoo/bar/", + "/x/foo/bar/", + "/x//foo", + "/y", + "/y/x/foo", ): assert re.match(path_pat, path) is None @pytest.mark.parametrize( - 'uri,expected', + "uri,expected", [ ("/notebooks/mynotebook/", "/notebooks/mynotebook"), ("////foo///", "/foo"), ("//example.com/", "/example.com"), ("/has/param/?hasparam=true", "/has/param?hasparam=true"), - ] + ], ) -async def test_trailing_slash(jp_ensure_app_fixture, uri, expected, http_server_client, jp_auth_header, jp_base_url): +async def test_trailing_slash( + jp_ensure_app_fixture, uri, expected, http_server_client, jp_auth_header, jp_base_url +): # http_server_client raises an exception when follow_redirects=False with pytest.raises(tornado.httpclient.HTTPClientError) as err: await http_server_client.fetch( url_path_join(jp_base_url, uri), headers=jp_auth_header, request_timeout=20, - follow_redirects=False + follow_redirects=False, ) # Capture the response from the raised exception value. 
response = err.value.response diff --git a/jupyter_server/tests/test_serialize.py b/jupyter_server/tests/test_serialize.py index 07947dc549..56314df94c 100644 --- a/jupyter_server/tests/test_serialize.py +++ b/jupyter_server/tests/test_serialize.py @@ -1,25 +1,24 @@ """Test serialize/deserialize messages with buffers""" - import os from jupyter_client.session import Session -from jupyter_server.base.zmqhandlers import ( - serialize_binary_message, - deserialize_binary_message, -) + +from jupyter_server.base.zmqhandlers import deserialize_binary_message +from jupyter_server.base.zmqhandlers import serialize_binary_message + def test_serialize_binary(): s = Session() - msg = s.msg('data_pub', content={'a': 'b'}) - msg['buffers'] = [ memoryview(os.urandom(3)) for i in range(3) ] + msg = s.msg("data_pub", content={"a": "b"}) + msg["buffers"] = [memoryview(os.urandom(3)) for i in range(3)] bmsg = serialize_binary_message(msg) assert isinstance(bmsg, bytes) def test_deserialize_binary(): s = Session() - msg = s.msg('data_pub', content={'a': 'b'}) - msg['buffers'] = [ memoryview(os.urandom(2)) for i in range(3) ] + msg = s.msg("data_pub", content={"a": "b"}) + msg["buffers"] = [memoryview(os.urandom(2)) for i in range(3)] bmsg = serialize_binary_message(msg) msg2 = deserialize_binary_message(bmsg) - assert msg2 == msg \ No newline at end of file + assert msg2 == msg diff --git a/jupyter_server/tests/test_serverapp.py b/jupyter_server/tests/test_serverapp.py index 9d4001ca4a..3808eb4dc1 100644 --- a/jupyter_server/tests/test_serverapp.py +++ b/jupyter_server/tests/test_serverapp.py @@ -1,25 +1,23 @@ -import os import getpass -import pathlib -import pytest import logging +import os +import pathlib from unittest.mock import patch +import pytest +from jupyter_core.application import NoStart from traitlets import TraitError from traitlets.tests.utils import check_help_all_output -from jupyter_core.application import NoStart -from jupyter_server.serverapp import ( - ServerApp, - list_running_servers, - JupyterPasswordApp -) from jupyter_server.auth.security import passwd_check +from jupyter_server.serverapp import JupyterPasswordApp +from jupyter_server.serverapp import list_running_servers +from jupyter_server.serverapp import ServerApp def test_help_output(): """jupyter server --help-all works""" - check_help_all_output('jupyter_server') + check_help_all_output("jupyter_server") def test_server_info_file(tmp_path, jp_configurable_serverapp): @@ -31,9 +29,9 @@ def test_server_info_file(tmp_path, jp_configurable_serverapp): assert len(servers) == 1 sinfo = servers[0] - assert sinfo['port'] == app.port - assert sinfo['url'] == app.connection_url - assert sinfo['version'] == app.version + assert sinfo["port"] == app.port + assert sinfo["url"] == app.connection_url + assert sinfo["version"] == app.version app.remove_server_info_file() @@ -47,18 +45,12 @@ def test_root_dir(tmp_path, jp_configurable_serverapp): # Build a list of invalid paths -@pytest.fixture( - params=[ - ('notebooks',), - ('root', 'dir', 'is', 'missing'), - ('test.txt',) - ] -) +@pytest.fixture(params=[("notebooks",), ("root", "dir", "is", "missing"), ("test.txt",)]) def invalid_root_dir(tmp_path, request): path = tmp_path.joinpath(*request.param) # If the path is a file, create it. 
- if os.path.splitext(str(path))[1] != '': - path.write_text('') + if os.path.splitext(str(path))[1] != "": + path.write_text("") return str(path) @@ -67,13 +59,8 @@ def test_invalid_root_dir(invalid_root_dir, jp_configurable_serverapp): with pytest.raises(TraitError): app.root_dir = invalid_root_dir -@pytest.fixture( - params=[ - ('/',), - ('first-level',), - ('first-level', 'second-level') - ] -) + +@pytest.fixture(params=[("/",), ("first-level",), ("first-level", "second-level")]) def valid_root_dir(tmp_path, request): path = tmp_path.joinpath(*request.param) if not path.exists(): @@ -81,35 +68,36 @@ def valid_root_dir(tmp_path, request): path.mkdir(parents=True) return str(path) + def test_valid_root_dir(valid_root_dir, jp_configurable_serverapp): app = jp_configurable_serverapp(root_dir=valid_root_dir) root_dir = valid_root_dir # If nested path, the last slash should # be stripped by the root_dir trait. - if root_dir != '/': - root_dir = valid_root_dir.rstrip('/') + if root_dir != "/": + root_dir = valid_root_dir.rstrip("/") assert app.root_dir == root_dir def test_generate_config(tmp_path, jp_configurable_serverapp): app = jp_configurable_serverapp(config_dir=str(tmp_path)) - app.initialize(['--generate-config', '--allow-root']) + app.initialize(["--generate-config", "--allow-root"]) with pytest.raises(NoStart): app.start() - assert tmp_path.joinpath('jupyter_server_config.py').exists() + assert tmp_path.joinpath("jupyter_server_config.py").exists() def test_server_password(tmp_path, jp_configurable_serverapp): - password = 'secret' - with patch.dict( - 'os.environ', {'JUPYTER_CONFIG_DIR': str(tmp_path)} - ), patch.object(getpass, 'getpass', return_value=password): + password = "secret" + with patch.dict("os.environ", {"JUPYTER_CONFIG_DIR": str(tmp_path)}), patch.object( + getpass, "getpass", return_value=password + ): app = JupyterPasswordApp(log_level=logging.ERROR) app.initialize([]) app.start() sv = jp_configurable_serverapp() sv.load_config_file() - assert sv.password != '' + assert sv.password != "" passwd_check(sv.password, password) @@ -126,57 +114,30 @@ def prefix_path(jp_root_dir, tmp_path): Returns a pathlib Path object. 
""" + def _inner(rawpath): path = pathlib.PurePosixPath(rawpath) - if rawpath.startswith('/jp_root_dir'): + if rawpath.startswith("/jp_root_dir"): path = jp_root_dir.joinpath(*path.parts[2:]) - elif rawpath.startswith('/tmp_path'): + elif rawpath.startswith("/tmp_path"): path = tmp_path.joinpath(*path.parts[2:]) return pathlib.Path(path) + return _inner @pytest.mark.parametrize( "root_dir,file_to_run,expected_output", [ - ( - None, - 'notebook.ipynb', - 'notebook.ipynb' - ), - ( - None, - '/tmp_path/path/to/notebook.ipynb', - 'notebook.ipynb' - ), - ( - '/jp_root_dir', - '/tmp_path/path/to/notebook.ipynb', - SystemExit - ), - ( - '/tmp_path', - '/tmp_path/path/to/notebook.ipynb', - 'path/to/notebook.ipynb' - ), - ( - '/jp_root_dir', - 'notebook.ipynb', - 'notebook.ipynb' - ), - ( - '/jp_root_dir', - 'path/to/notebook.ipynb', - 'path/to/notebook.ipynb' - ), - ] + (None, "notebook.ipynb", "notebook.ipynb"), + (None, "/tmp_path/path/to/notebook.ipynb", "notebook.ipynb"), + ("/jp_root_dir", "/tmp_path/path/to/notebook.ipynb", SystemExit), + ("/tmp_path", "/tmp_path/path/to/notebook.ipynb", "path/to/notebook.ipynb"), + ("/jp_root_dir", "notebook.ipynb", "notebook.ipynb"), + ("/jp_root_dir", "path/to/notebook.ipynb", "path/to/notebook.ipynb"), + ], ) -def test_resolve_file_to_run_and_root_dir( - prefix_path, - root_dir, - file_to_run, - expected_output -): +def test_resolve_file_to_run_and_root_dir(prefix_path, root_dir, file_to_run, expected_output): # Verify that the Singleton instance is cleared before the test runs. ServerApp.clear_instance() @@ -213,62 +174,62 @@ def test_resolve_file_to_run_and_root_dir( # in urls shown below will be replaced with the token # generated by the ServerApp on instance creation. @pytest.mark.parametrize( - 'config,public_url,local_url,connection_url', + "config,public_url,local_url,connection_url", [ # Token is hidden when configured. 
( {"token": "test"}, "http://localhost:8888/?token=...", "http://127.0.0.1:8888/?token=...", - "http://localhost:8888/" + "http://localhost:8888/", ), # Verify port number has changed ( {"port": 9999}, "http://localhost:9999/?token=", "http://127.0.0.1:9999/?token=", - "http://localhost:9999/" + "http://localhost:9999/", ), ( {"ip": "1.1.1.1"}, "http://1.1.1.1:8888/?token=", "http://127.0.0.1:8888/?token=", - "http://1.1.1.1:8888/" + "http://1.1.1.1:8888/", ), # Verify that HTTPS is returned when certfile is given ( {"certfile": "/path/to/dummy/file"}, "https://localhost:8888/?token=", "https://127.0.0.1:8888/?token=", - "https://localhost:8888/" + "https://localhost:8888/", ), # Verify changed port and a custom display URL ( {"port": 9999, "custom_display_url": "http://test.org"}, "http://test.org/?token=", "http://127.0.0.1:9999/?token=", - "http://localhost:9999/" + "http://localhost:9999/", ), ( {"base_url": "/", "default_url": "/test/"}, "http://localhost:8888/test/?token=", "http://127.0.0.1:8888/test/?token=", - "http://localhost:8888/" + "http://localhost:8888/", ), # Verify unix socket URLs are handled properly ( {"sock": "/tmp/jp-test.sock"}, "http+unix://%2Ftmp%2Fjp-test.sock/?token=", "http+unix://%2Ftmp%2Fjp-test.sock/?token=", - "http+unix://%2Ftmp%2Fjp-test.sock/" + "http+unix://%2Ftmp%2Fjp-test.sock/", ), ( {"base_url": "/", "default_url": "/test/", "sock": "/tmp/jp-test.sock"}, "http+unix://%2Ftmp%2Fjp-test.sock/test/?token=", "http+unix://%2Ftmp%2Fjp-test.sock/test/?token=", - "http+unix://%2Ftmp%2Fjp-test.sock/" + "http+unix://%2Ftmp%2Fjp-test.sock/", ), - ] + ], ) def test_urls(config, public_url, local_url, connection_url): # Verify we're working with a clean instance. @@ -299,7 +260,7 @@ def test_valid_preferred_dir(tmp_path, jp_configurable_serverapp): def test_valid_preferred_dir_is_root_subdir(tmp_path, jp_configurable_serverapp): path = str(tmp_path) - path_subdir = str(tmp_path / 'subdir') + path_subdir = str(tmp_path / "subdir") os.makedirs(path_subdir, exist_ok=True) app = jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) assert app.root_dir == path @@ -309,7 +270,7 @@ def test_valid_preferred_dir_is_root_subdir(tmp_path, jp_configurable_serverapp) def test_valid_preferred_dir_does_not_exist(tmp_path, jp_configurable_serverapp): path = str(tmp_path) - path_subdir = str(tmp_path / 'subdir') + path_subdir = str(tmp_path / "subdir") with pytest.raises(TraitError) as error: app = jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) @@ -318,7 +279,7 @@ def test_valid_preferred_dir_does_not_exist(tmp_path, jp_configurable_serverapp) def test_invalid_preferred_dir_does_not_exist(tmp_path, jp_configurable_serverapp): path = str(tmp_path) - path_subdir = str(tmp_path / 'subdir') + path_subdir = str(tmp_path / "subdir") with pytest.raises(TraitError) as error: app = jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) @@ -327,7 +288,7 @@ def test_invalid_preferred_dir_does_not_exist(tmp_path, jp_configurable_serverap def test_invalid_preferred_dir_does_not_exist_set(tmp_path, jp_configurable_serverapp): path = str(tmp_path) - path_subdir = str(tmp_path / 'subdir') + path_subdir = str(tmp_path / "subdir") app = jp_configurable_serverapp(root_dir=path) with pytest.raises(TraitError) as error: @@ -337,7 +298,7 @@ def test_invalid_preferred_dir_does_not_exist_set(tmp_path, jp_configurable_serv def test_invalid_preferred_dir_not_root_subdir(tmp_path, jp_configurable_serverapp): - path = str(tmp_path / 'subdir') + path = 
str(tmp_path / "subdir") os.makedirs(path, exist_ok=True) not_subdir_path = str(tmp_path) @@ -348,7 +309,7 @@ def test_invalid_preferred_dir_not_root_subdir(tmp_path, jp_configurable_servera def test_invalid_preferred_dir_not_root_subdir_set(tmp_path, jp_configurable_serverapp): - path = str(tmp_path / 'subdir') + path = str(tmp_path / "subdir") os.makedirs(path, exist_ok=True) not_subdir_path = str(tmp_path) @@ -361,7 +322,7 @@ def test_invalid_preferred_dir_not_root_subdir_set(tmp_path, jp_configurable_ser def test_observed_root_dir_updates_preferred_dir(tmp_path, jp_configurable_serverapp): path = str(tmp_path) - new_path = str(tmp_path / 'subdir') + new_path = str(tmp_path / "subdir") os.makedirs(new_path, exist_ok=True) app = jp_configurable_serverapp(root_dir=path, preferred_dir=path) diff --git a/jupyter_server/tests/test_terminal.py b/jupyter_server/tests/test_terminal.py index f11185510a..0f41ece269 100644 --- a/jupyter_server/tests/test_terminal.py +++ b/jupyter_server/tests/test_terminal.py @@ -1,9 +1,9 @@ +import asyncio +import json import os import shutil -import pytest -import json -import asyncio +import pytest from tornado.httpclient import HTTPClientError from traitlets.config import Config @@ -14,12 +14,13 @@ def kill_all(jp_serverapp): async def _(): await jp_serverapp.web_app.settings["terminal_manager"].kill_all() + return _ @pytest.fixture def terminal_path(tmp_path): - subdir = tmp_path.joinpath('terminal_path') + subdir = tmp_path.joinpath("terminal_path") subdir.mkdir() yield subdir @@ -33,20 +34,23 @@ def terminal_path(tmp_path): @pytest.fixture def jp_server_config(): - return Config({ - 'ServerApp': { - 'TerminalManager': { - 'cull_inactive_timeout': CULL_TIMEOUT, - 'cull_interval': CULL_INTERVAL + return Config( + { + "ServerApp": { + "TerminalManager": { + "cull_inactive_timeout": CULL_TIMEOUT, + "cull_interval": CULL_INTERVAL, + } } } - }) + ) async def test_no_terminals(jp_fetch): resp_list = await jp_fetch( - 'api', 'terminals', - method='GET', + "api", + "terminals", + method="GET", allow_nonstandard_methods=True, ) @@ -57,16 +61,18 @@ async def test_no_terminals(jp_fetch): async def test_terminal_create(jp_fetch, kill_all): resp = await jp_fetch( - 'api', 'terminals', - method='POST', + "api", + "terminals", + method="POST", allow_nonstandard_methods=True, ) term = json.loads(resp.body.decode()) - assert term['name'] == "1" + assert term["name"] == "1" resp_list = await jp_fetch( - 'api', 'terminals', - method='GET', + "api", + "terminals", + method="GET", allow_nonstandard_methods=True, ) @@ -79,45 +85,47 @@ async def test_terminal_create(jp_fetch, kill_all): async def test_terminal_create_with_kwargs(jp_fetch, jp_ws_fetch, terminal_path, kill_all): resp_create = await jp_fetch( - 'api', 'terminals', - method='POST', - body=json.dumps({'cwd': str(terminal_path)}), + "api", + "terminals", + method="POST", + body=json.dumps({"cwd": str(terminal_path)}), allow_nonstandard_methods=True, ) data = json.loads(resp_create.body.decode()) - term_name = data['name'] + term_name = data["name"] resp_get = await jp_fetch( - 'api', 'terminals', term_name, - method='GET', + "api", + "terminals", + term_name, + method="GET", allow_nonstandard_methods=True, ) data = json.loads(resp_get.body.decode()) - assert data['name'] == term_name + assert data["name"] == term_name await kill_all() async def test_terminal_create_with_cwd(jp_fetch, jp_ws_fetch, terminal_path): resp = await jp_fetch( - 'api', 'terminals', - method='POST', - body=json.dumps({'cwd': str(terminal_path)}), 
+ "api", + "terminals", + method="POST", + body=json.dumps({"cwd": str(terminal_path)}), allow_nonstandard_methods=True, ) data = json.loads(resp.body.decode()) - term_name = data['name'] + term_name = data["name"] - ws = await jp_ws_fetch( - 'terminals', 'websocket', term_name - ) + ws = await jp_ws_fetch("terminals", "websocket", term_name) - ws.write_message(json.dumps(['stdin', 'pwd\r\n'])) + ws.write_message(json.dumps(["stdin", "pwd\r\n"])) - message_stdout = '' + message_stdout = "" while True: try: message = await asyncio.wait_for(ws.read_message(), timeout=1.0) @@ -126,7 +134,7 @@ async def test_terminal_create_with_cwd(jp_fetch, jp_ws_fetch, terminal_path): message = json.loads(message) - if message[0] == 'stdout': + if message[0] == "stdout": message_stdout += message[1] ws.close() @@ -138,7 +146,7 @@ async def test_culling_config(jp_server_config, jp_configurable_serverapp): terminal_mgr_config = jp_configurable_serverapp().config.ServerApp.TerminalManager assert terminal_mgr_config.cull_inactive_timeout == CULL_TIMEOUT assert terminal_mgr_config.cull_interval == CULL_INTERVAL - terminal_mgr_settings = jp_configurable_serverapp().web_app.settings['terminal_manager'] + terminal_mgr_settings = jp_configurable_serverapp().web_app.settings["terminal_manager"] assert terminal_mgr_settings.cull_inactive_timeout == CULL_TIMEOUT assert terminal_mgr_settings.cull_interval == CULL_INTERVAL @@ -146,20 +154,23 @@ async def test_culling_config(jp_server_config, jp_configurable_serverapp): async def test_culling(jp_server_config, jp_fetch): # POST request resp = await jp_fetch( - 'api', 'terminals', - method='POST', + "api", + "terminals", + method="POST", allow_nonstandard_methods=True, ) term = json.loads(resp.body.decode()) - term_1 = term['name'] - last_activity = term['last_activity'] + term_1 = term["name"] + last_activity = term["last_activity"] culled = False for i in range(10): # Culling should occur in a few seconds try: resp = await jp_fetch( - 'api', 'terminals', term_1, - method='GET', + "api", + "terminals", + term_1, + method="GET", allow_nonstandard_methods=True, ) except HTTPClientError as e: diff --git a/jupyter_server/tests/test_traittypes.py b/jupyter_server/tests/test_traittypes.py index a0f9e2a7df..7a7be84406 100644 --- a/jupyter_server/tests/test_traittypes.py +++ b/jupyter_server/tests/test_traittypes.py @@ -1,12 +1,11 @@ import pytest -from traitlets import HasTraits, TraitError +from traitlets import HasTraits +from traitlets import TraitError from traitlets.utils.importstring import import_item -from jupyter_server.traittypes import ( - InstanceFromClasses, - TypeFromClasses -) from jupyter_server.services.contents.largefilemanager import LargeFileManager +from jupyter_server.traittypes import InstanceFromClasses +from jupyter_server.traittypes import TypeFromClasses class DummyClass: @@ -25,45 +24,37 @@ class Thing(HasTraits): int, str, DummyClass, - ] + ], ) b = TypeFromClasses( default_value=None, allow_none=True, - klasses=[ - DummyClass, - int, - 'jupyter_server.services.contents.manager.ContentsManager' - ] + klasses=[DummyClass, int, "jupyter_server.services.contents.manager.ContentsManager"], ) class TestInstanceFromClasses: - - @pytest.mark.parametrize( - 'value', - [1, 'test', DummyClass()] - ) + @pytest.mark.parametrize("value", [1, "test", DummyClass()]) def test_good_values(self, value): thing = Thing(a=value) assert thing.a == value - @pytest.mark.parametrize( - 'value', - [2.4, object()] - ) + @pytest.mark.parametrize("value", [2.4, object()]) def 
test_bad_values(self, value): with pytest.raises(TraitError) as e: thing = Thing(a=value) class TestTypeFromClasses: - @pytest.mark.parametrize( - 'value', - [DummyClass, DummyInt, LargeFileManager, - 'jupyter_server.services.contents.manager.ContentsManager'] + "value", + [ + DummyClass, + DummyInt, + LargeFileManager, + "jupyter_server.services.contents.manager.ContentsManager", + ], ) def test_good_values(self, value): thing = Thing(b=value) @@ -71,10 +62,7 @@ def test_good_values(self, value): value = import_item(value) assert thing.b == value - @pytest.mark.parametrize( - 'value', - [float, object] - ) + @pytest.mark.parametrize("value", [float, object]) def test_bad_values(self, value): with pytest.raises(TraitError) as e: thing = Thing(b=value) diff --git a/jupyter_server/tests/test_utils.py b/jupyter_server/tests/test_utils.py index cf04d743e7..dea714f8cc 100644 --- a/jupyter_server/tests/test_utils.py +++ b/jupyter_server/tests/test_utils.py @@ -2,39 +2,28 @@ from unittest.mock import patch import pytest - from traitlets.tests.utils import check_help_all_output -from jupyter_server.utils import ( - url_escape, - url_unescape, - is_namespace_package -) + +from jupyter_server.utils import is_namespace_package +from jupyter_server.utils import url_escape +from jupyter_server.utils import url_unescape def test_help_output(): - check_help_all_output('jupyter_server') + check_help_all_output("jupyter_server") @pytest.mark.parametrize( - 'unescaped,escaped', + "unescaped,escaped", [ + ("/this is a test/for spaces/", "/this%20is%20a%20test/for%20spaces/"), + ("notebook with space.ipynb", "notebook%20with%20space.ipynb"), + ("/path with a/notebook and space.ipynb", "/path%20with%20a/notebook%20and%20space.ipynb"), ( - '/this is a test/for spaces/', - '/this%20is%20a%20test/for%20spaces/' - ), - ( - 'notebook with space.ipynb', - 'notebook%20with%20space.ipynb' + "/ !@$#%^&* / test %^ notebook @#$ name.ipynb", + "/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb", ), - ( - '/path with a/notebook and space.ipynb', - '/path%20with%20a/notebook%20and%20space.ipynb' - ), - ( - '/ !@$#%^&* / test %^ notebook @#$ name.ipynb', - '/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb' - ) - ] + ], ) def test_url_escaping(unescaped, escaped): # Test escaping. 
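
# Editorial sketch, not part of the patch: the parametrized cases above encode a
# round-trip invariant (url_escape and url_unescape are inverses over notebook
# paths). A minimal illustration, assuming only the two utilities this test
# module already imports:

from jupyter_server.utils import url_escape, url_unescape

path = "/path with a/notebook and space.ipynb"
escaped = url_escape(path)
# Same mapping as the parametrized case above.
assert escaped == "/path%20with%20a/notebook%20and%20space.ipynb"
assert url_unescape(escaped) == path  # escaping round-trips cleanly
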
@@ -46,26 +35,26 @@ def test_url_escaping(unescaped, escaped): @pytest.mark.parametrize( - 'name, expected', + "name, expected", [ # returns True if it is a namespace package - ('test_namespace', True), + ("test_namespace", True), # returns False if it isn't a namespace package - ('sys', False), - ('jupyter_server', False), + ("sys", False), + ("jupyter_server", False), # returns None if it isn't importable - ('not_a_python_namespace', None) - ] + ("not_a_python_namespace", None), + ], ) def test_is_namespace_package(monkeypatch, name, expected): - monkeypatch.syspath_prepend(Path(__file__).parent / 'namespace-package-test') - + monkeypatch.syspath_prepend(Path(__file__).parent / "namespace-package-test") + assert is_namespace_package(name) is expected - + def test_is_namespace_package_no_spec(): with patch("importlib.util.find_spec") as mocked_spec: mocked_spec.side_effect = ValueError() - assert is_namespace_package('dummy') is None - mocked_spec.assert_called_once_with('dummy') + assert is_namespace_package("dummy") is None + mocked_spec.assert_called_once_with("dummy") diff --git a/jupyter_server/tests/test_version.py b/jupyter_server/tests/test_version.py index 6c30136eab..879c257c46 100644 --- a/jupyter_server/tests/test_version.py +++ b/jupyter_server/tests/test_version.py @@ -1,28 +1,34 @@ import re + import pytest from jupyter_server import __version__ -pep440re = re.compile(r'^(\d+)\.(\d+)\.(\d+((a|b|rc)\d+)?)(\.post\d+)?(\.dev\d*)?$') +pep440re = re.compile(r"^(\d+)\.(\d+)\.(\d+((a|b|rc)\d+)?)(\.post\d+)?(\.dev\d*)?$") + def raise_on_bad_version(version): if not pep440re.match(version): - raise ValueError("Versions String does apparently not match Pep 440 specification, " - "which might lead to sdist and wheel being seen as 2 different release. " - "E.g: do not use dots for beta/alpha/rc markers.") + raise ValueError( + "Versions String does apparently not match Pep 440 specification, " + "which might lead to sdist and wheel being seen as 2 different release. " + "E.g: do not use dots for beta/alpha/rc markers." 
+ ) + # --------- Meta test to test the versioning tests ------------- + @pytest.mark.parametrize( - 'version', + "version", [ - '4.1.0.b1', - '4.1.b1', - '4.2', - 'X.y.z', - '1.2.3.dev1.post2', - ] + "4.1.0.b1", + "4.1.b1", + "4.2", + "X.y.z", + "1.2.3.dev1.post2", + ], ) def test_invalid_pep440_versions(version): with pytest.raises(ValueError): @@ -30,11 +36,11 @@ def test_invalid_pep440_versions(version): @pytest.mark.parametrize( - 'version', + "version", [ - '4.1.1', - '4.2.1b3', - ] + "4.1.1", + "4.2.1b3", + ], ) def test_valid_pep440_versions(version): assert raise_on_bad_version(version) is None @@ -43,6 +49,3 @@ def test_valid_pep440_versions(version): # --------- Test current version -------------- def test_current_version(): raise_on_bad_version(__version__) - - - diff --git a/jupyter_server/tests/test_view.py b/jupyter_server/tests/test_view.py index 5176ee733f..f6fbca5a93 100644 --- a/jupyter_server/tests/test_view.py +++ b/jupyter_server/tests/test_view.py @@ -4,8 +4,8 @@ import pytest import tornado -from jupyter_server.utils import url_path_join from .utils import expected_http_error +from jupyter_server.utils import url_path_join class IFrameSrcFinder(HTMLParser): diff --git a/jupyter_server/tests/unix_sockets/conftest.py b/jupyter_server/tests/unix_sockets/conftest.py index c3eb43f4fc..dffd4bb4b6 100644 --- a/jupyter_server/tests/unix_sockets/conftest.py +++ b/jupyter_server/tests/unix_sockets/conftest.py @@ -1,6 +1,8 @@ import os import pathlib + import pytest + from jupyter_server import DEFAULT_JUPYTER_SERVER_PORT @@ -15,8 +17,8 @@ def jp_unix_socket_file(jp_process_id): """Define a temporary socket connection""" # Rely on `/tmp` to avoid any Linux socket length max buffer # issues. Key on PID for process-wise concurrency. - tmp_path = pathlib.Path('/tmp') - filename = 'jupyter_server.{}.sock'.format(jp_process_id) + tmp_path = pathlib.Path("/tmp") + filename = "jupyter_server.{}.sock".format(jp_process_id) jp_unix_socket_file = tmp_path.joinpath(filename) yield str(jp_unix_socket_file) # Clean up the file after the test runs. diff --git a/jupyter_server/tests/unix_sockets/test_api.py b/jupyter_server/tests/unix_sockets/test_api.py index 3d5a69ccb2..1653a90749 100644 --- a/jupyter_server/tests/unix_sockets/test_api.py +++ b/jupyter_server/tests/unix_sockets/test_api.py @@ -1,15 +1,15 @@ import sys + import pytest # Skip this module if on Windows. Unix sockets are not available on Windows. pytestmark = pytest.mark.skipif( - sys.platform.startswith('win'), - reason="Unix sockets are not available on Windows." + sys.platform.startswith("win"), reason="Unix sockets are not available on Windows." 
) import urllib -if not sys.platform.startswith('win'): +if not sys.platform.startswith("win"): from tornado.netutil import bind_unix_socket import jupyter_server.serverapp @@ -23,12 +23,7 @@ @pytest.fixture def jp_server_config(jp_unix_socket_file): """Configure the serverapp fixture with the unix socket.""" - return { - "ServerApp": { - "sock" : jp_unix_socket_file, - "allow_remote_access": True - } - } + return {"ServerApp": {"sock": jp_unix_socket_file, "allow_remote_access": True}} @pytest.fixture @@ -44,14 +39,16 @@ def http_server_port(jp_unix_socket_file, jp_process_id): @pytest.fixture def jp_unix_socket_fetch(jp_unix_socket_file, jp_auth_header, jp_base_url, http_server, io_loop): """A fetch fixture for Jupyter Server tests that use the unix_serverapp fixture""" + async def client(*parts, headers={}, params={}, **kwargs): # Handle URL strings host_url = urlencode_unix_socket(jp_unix_socket_file) path_url = url_path_join(jp_base_url, *parts) params_url = urllib.parse.urlencode(params) - url = url_path_join(host_url, path_url+ "?" + params_url) + url = url_path_join(host_url, path_url + "?" + params_url) r = await async_fetch(url, headers=headers, io_loop=io_loop, **kwargs) return r + return client @@ -69,4 +66,4 @@ async def test_list_running_servers(jp_unix_socket_file, http_server): """Test that a server running on unix sockets is discovered by the server list""" servers = list(jupyter_server.serverapp.list_running_servers()) assert len(servers) >= 1 - assert jp_unix_socket_file in {info['sock'] for info in servers} + assert jp_unix_socket_file in {info["sock"] for info in servers} diff --git a/jupyter_server/tests/unix_sockets/test_serverapp_integration.py b/jupyter_server/tests/unix_sockets/test_serverapp_integration.py index 263de59754..69be15b934 100644 --- a/jupyter_server/tests/unix_sockets/test_serverapp_integration.py +++ b/jupyter_server/tests/unix_sockets/test_serverapp_integration.py @@ -1,11 +1,11 @@ -import sys import stat +import sys + import pytest # Skip this module if on Windows. Unix sockets are not available on Windows. pytestmark = pytest.mark.skipif( - sys.platform.startswith('win'), - reason="Unix sockets are not available on Windows." + sys.platform.startswith("win"), reason="Unix sockets are not available on Windows." ) import os @@ -20,36 +20,37 @@ def test_shutdown_sock_server_integration(jp_unix_socket_file): url = urlencode_unix_socket(jp_unix_socket_file).encode() encoded_sock_path = urlencode_unix_socket_path(jp_unix_socket_file) p = subprocess.Popen( - ['jupyter-server', '--sock=%s' % jp_unix_socket_file, '--sock-mode=0700'], - stdout=subprocess.PIPE, stderr=subprocess.PIPE + ["jupyter-server", "--sock=%s" % jp_unix_socket_file, "--sock-mode=0700"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, ) complete = False - for line in iter(p.stderr.readline, b''): + for line in iter(p.stderr.readline, b""): if url in line: complete = True break - assert complete, 'did not find socket URL in stdout when launching notebook' + assert complete, "did not find socket URL in stdout when launching notebook" socket_path = encoded_sock_path.encode() - assert socket_path in subprocess.check_output(['jupyter-server', 'list']) + assert socket_path in subprocess.check_output(["jupyter-server", "list"]) # Ensure umask is properly applied. 
assert stat.S_IMODE(os.lstat(jp_unix_socket_file).st_mode) == 0o700 try: - subprocess.check_output(['jupyter-server', 'stop'], stderr=subprocess.STDOUT) + subprocess.check_output(["jupyter-server", "stop"], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: - assert 'There is currently no server running on' in e.output.decode() + assert "There is currently no server running on" in e.output.decode() else: - raise AssertionError('expected stop command to fail due to target mis-match') + raise AssertionError("expected stop command to fail due to target mis-match") - assert encoded_sock_path.encode() in subprocess.check_output(['jupyter-server', 'list']) + assert encoded_sock_path.encode() in subprocess.check_output(["jupyter-server", "list"]) - subprocess.check_output(['jupyter-server', 'stop', jp_unix_socket_file]) + subprocess.check_output(["jupyter-server", "stop", jp_unix_socket_file]) - assert encoded_sock_path.encode() not in subprocess.check_output(['jupyter-server', 'list']) + assert encoded_sock_path.encode() not in subprocess.check_output(["jupyter-server", "list"]) p.wait() @@ -58,38 +59,35 @@ def test_shutdown_sock_server_integration(jp_unix_socket_file): def test_sock_server_validate_sockmode_type(): try: subprocess.check_output( - ['jupyter-server', '--sock=/tmp/nonexistent', '--sock-mode=badbadbad'], - stderr=subprocess.STDOUT + ["jupyter-server", "--sock=/tmp/nonexistent", "--sock-mode=badbadbad"], + stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: - assert 'badbadbad' in e.output.decode() + assert "badbadbad" in e.output.decode() else: - raise AssertionError('expected execution to fail due to validation of --sock-mode param') + raise AssertionError("expected execution to fail due to validation of --sock-mode param") @pytest.mark.integration_test def test_sock_server_validate_sockmode_accessible(): try: subprocess.check_output( - ['jupyter-server', '--sock=/tmp/nonexistent', '--sock-mode=0444'], - stderr=subprocess.STDOUT + ["jupyter-server", "--sock=/tmp/nonexistent", "--sock-mode=0444"], + stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: - assert '0444' in e.output.decode() + assert "0444" in e.output.decode() else: - raise AssertionError('expected execution to fail due to validation of --sock-mode param') + raise AssertionError("expected execution to fail due to validation of --sock-mode param") -def _ensure_stopped(check_msg='There are no running servers'): +def _ensure_stopped(check_msg="There are no running servers"): try: - subprocess.check_output( - ['jupyter-server', 'stop'], - stderr=subprocess.STDOUT - ) + subprocess.check_output(["jupyter-server", "stop"], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: assert check_msg in e.output.decode() else: - raise AssertionError('expected all servers to be stopped') + raise AssertionError("expected all servers to be stopped") @pytest.mark.integration_test @@ -98,42 +96,36 @@ def test_stop_multi_integration(jp_unix_socket_file, jp_http_port): Mostly suitable for local dev testing due to reliance on default port binding. """ - TEST_PORT = '9797' - MSG_TMPL = 'Shutting down server on {}...' + TEST_PORT = "9797" + MSG_TMPL = "Shutting down server on {}..." _ensure_stopped() # Default port. - p1 = subprocess.Popen( - ['jupyter-server', '--no-browser'] - ) + p1 = subprocess.Popen(["jupyter-server", "--no-browser"]) # Unix socket. 
- p2 = subprocess.Popen( - ['jupyter-server', '--sock=%s' % jp_unix_socket_file] - ) + p2 = subprocess.Popen(["jupyter-server", "--sock=%s" % jp_unix_socket_file]) # Specified port - p3 = subprocess.Popen( - ['jupyter-server', '--no-browser', '--port=%s' % TEST_PORT] - ) + p3 = subprocess.Popen(["jupyter-server", "--no-browser", "--port=%s" % TEST_PORT]) time.sleep(3) shutdown_msg = MSG_TMPL.format(jp_http_port) - assert shutdown_msg in subprocess.check_output( - ['jupyter-server', 'stop'] - ).decode() + assert shutdown_msg in subprocess.check_output(["jupyter-server", "stop"]).decode() - _ensure_stopped('There is currently no server running on 8888') + _ensure_stopped("There is currently no server running on 8888") - assert MSG_TMPL.format(jp_unix_socket_file) in subprocess.check_output( - ['jupyter-server', 'stop', jp_unix_socket_file] - ).decode() + assert ( + MSG_TMPL.format(jp_unix_socket_file) + in subprocess.check_output(["jupyter-server", "stop", jp_unix_socket_file]).decode() + ) - assert MSG_TMPL.format(TEST_PORT) in subprocess.check_output( - ['jupyter-server', 'stop', TEST_PORT] - ).decode() + assert ( + MSG_TMPL.format(TEST_PORT) + in subprocess.check_output(["jupyter-server", "stop", TEST_PORT]).decode() + ) _ensure_stopped() @@ -146,12 +138,12 @@ def test_stop_multi_integration(jp_unix_socket_file, jp_http_port): def test_launch_socket_collision(jp_unix_socket_file): """Tests UNIX socket in-use detection for lifecycle correctness.""" sock = jp_unix_socket_file - check_msg = 'socket %s is already in use' % sock + check_msg = "socket %s is already in use" % sock _ensure_stopped() # Start a server. - cmd = ['jupyter-server', '--sock=%s' % sock] + cmd = ["jupyter-server", "--sock=%s" % sock] p1 = subprocess.Popen(cmd) time.sleep(3) @@ -166,7 +158,7 @@ def test_launch_socket_collision(jp_unix_socket_file): raise AssertionError("expected 'already in use' error, got success instead!") # Stop the background server, ensure it's stopped and wait on the process to exit. 
- subprocess.check_call(['jupyter-server', 'stop', sock]) + subprocess.check_call(["jupyter-server", "stop", sock]) _ensure_stopped() diff --git a/jupyter_server/tests/utils.py b/jupyter_server/tests/utils.py index 8e7376897c..c2d6181d62 100644 --- a/jupyter_server/tests/utils.py +++ b/jupyter_server/tests/utils.py @@ -1,11 +1,12 @@ import json + import tornado some_resource = u"The very model of a modern major general" sample_kernel_json = { - 'argv':['cat', '{connection_file}'], - 'display_name': 'Test kernel', + "argv": ["cat", "{connection_file}"], + "display_name": "Test kernel", } @@ -25,14 +26,16 @@ def expected_http_error(error, expected_code, expected_message=None): if expected_message is not None and expected_message != str(e): return False return True - elif any([ - isinstance(e, tornado.httpclient.HTTPClientError), - isinstance(e, tornado.httpclient.HTTPError) - ]): + elif any( + [ + isinstance(e, tornado.httpclient.HTTPClientError), + isinstance(e, tornado.httpclient.HTTPError), + ] + ): if expected_code != e.code: return False if expected_message: - message = json.loads(e.response.body.decode())['message'] + message = json.loads(e.response.body.decode())["message"] if expected_message != message: return False return True diff --git a/jupyter_server/traittypes.py b/jupyter_server/traittypes.py index 40c67be2e1..e551077260 100644 --- a/jupyter_server/traittypes.py +++ b/jupyter_server/traittypes.py @@ -1,6 +1,9 @@ -from ast import literal_eval import inspect -from traitlets import ClassBasedTraitType, Undefined, TraitError +from ast import literal_eval + +from traitlets import ClassBasedTraitType +from traitlets import TraitError +from traitlets import Undefined # Traitlet's 5.x includes a set of utilities for building # description strings for objects. Traitlets 5.x does not @@ -102,16 +105,15 @@ class name where an object was defined. name = _prefix(value) + name if tick_wrap: name = name.join("''") - return describe(article, value, name=name, - verbose=verbose, capital=capital) + return describe(article, value, name=name, verbose=verbose, capital=capital) elif article in ("a", "an") or article is None: if article is None: return typename return add_article(typename, False, capital) else: - raise ValueError("The 'article' argument should " - "be 'the', 'a', 'an', or None not %r" % article) - + raise ValueError( + "The 'article' argument should " "be 'the', 'a', 'an', or None not %r" % article + ) def add_article(name, definite=False, capital=False): """Returns the string with a prepended article. @@ -129,11 +131,11 @@ def add_article(name, definite=False, capital=False): if definite: result = "the " + name else: - first_letters = re.compile(r'[\W_]+').sub('', name) - if first_letters[:1].lower() in 'aeiou': - result = 'an ' + name + first_letters = re.compile(r"[\W_]+").sub("", name) + if first_letters[:1].lower() in "aeiou": + result = "an " + name else: - result = 'a ' + name + result = "a " + name if capital: return result[0].upper() + result[1:] else: @@ -141,11 +143,11 @@ def add_article(name, definite=False, capital=False): def _prefix(value): if isinstance(value, types.MethodType): - name = describe(None, value.__self__, verbose=True) + '.' + name = describe(None, value.__self__, verbose=True) + "." else: module = inspect.getmodule(value) if module is not None and module.__name__ != "builtins": - name = module.__name__ + '.' + name = module.__name__ + "." 
else: name = "" return name @@ -209,8 +211,10 @@ def validate(self, obj, value): try: value = self._resolve_string(value) except ImportError: - raise TraitError("The '%s' trait of %s instance must be a type, but " - "%r could not be imported" % (self.name, obj, value)) + raise TraitError( + "The '%s' trait of %s instance must be a type, but " + "%r could not be imported" % (self.name, obj, value) + ) try: if self.subclass_from_klasses(value): return value @@ -224,12 +228,12 @@ def info(self): result = "a subclass of " for klass in self.klasses: if not isinstance(klass, str): - klass = klass.__module__ + '.' + klass.__name__ + klass = klass.__module__ + "." + klass.__name__ result += f"{klass} or " # Strip the last "or" result = result.strip(" or ") if self.allow_none: - return result + ' or None' + return result + " or None" return result def instance_init(self, obj): @@ -258,7 +262,7 @@ def default_value_repr(self): if isinstance(value, str): return repr(value) else: - return repr(f'{value.__module__}.{value.__name__}') + return repr(f"{value.__module__}.{value.__name__}") class InstanceFromClasses(ClassBasedTraitType): @@ -266,6 +270,7 @@ class InstanceFromClasses(ClassBasedTraitType): The value can also be an instance of a subclass of the specified classes. Subclasses can declare default classes by overriding the klass attribute """ + def __init__(self, klasses=None, args=None, kw=None, **kwargs): """Construct an Instance trait. This trait allows values that are instances of a particular @@ -297,8 +302,10 @@ class or its subclasses. Our implementation is quite different elif all(inspect.isclass(k) or isinstance(k, str) for k in klasses): self.klasses = klasses else: - raise TraitError('The klasses attribute must be a list of class names or classes' - ' not: %r' % klasses) + raise TraitError( + "The klasses attribute must be a list of class names or classes" + " not: %r" % klasses + ) if (kw is not None) and not isinstance(kw, dict): raise TraitError("The 'kw' argument must be a dict or None.") @@ -330,7 +337,7 @@ def info(self): result += " or " result = result.strip(" or ") if self.allow_none: - result += ' or None' + result += " or None" return result def instance_init(self, obj): @@ -354,8 +361,7 @@ def _resolve_classes(self): def make_dynamic_default(self): if (self.default_args is None) and (self.default_kwargs is None): return None - return self.klass(*(self.default_args or ()), - **(self.default_kwargs or {})) + return self.klass(*(self.default_args or ()), **(self.default_kwargs or {})) def default_value_repr(self): return repr(self.make_dynamic_default()) diff --git a/jupyter_server/transutils.py b/jupyter_server/transutils.py index 8363693abf..2ca30e437d 100644 --- a/jupyter_server/transutils.py +++ b/jupyter_server/transutils.py @@ -1,11 +1,9 @@ """Translation related utilities. When imported, injects _ to builtins""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
- +import gettext import os import warnings -import gettext def _trans_gettext_deprecation_helper(*args, **kwargs): @@ -15,7 +13,9 @@ def _trans_gettext_deprecation_helper(*args, **kwargs): # Set up message catalog access -base_dir = os.path.realpath(os.path.join(__file__, '..', '..')) -trans = gettext.translation('notebook', localedir=os.path.join(base_dir, 'notebook/i18n'), fallback=True) +base_dir = os.path.realpath(os.path.join(__file__, "..", "..")) +trans = gettext.translation( + "notebook", localedir=os.path.join(base_dir, "notebook/i18n"), fallback=True +) _ = _trans_gettext_deprecation_helper _i18n = trans.gettext diff --git a/jupyter_server/utils.py b/jupyter_server/utils.py index 9a062dfc03..93f9792973 100644 --- a/jupyter_server/utils.py +++ b/jupyter_server/utils.py @@ -1,9 +1,6 @@ """Notebook related utilities""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - -from _frozen_importlib_external import _NamespacePath import asyncio import errno import importlib.util @@ -11,14 +8,21 @@ import os import socket import sys -from distutils.version import LooseVersion +from _frozen_importlib_external import _NamespacePath from contextlib import contextmanager - -from urllib.parse import (quote, unquote, urlparse, - urlsplit, urlunsplit, SplitResult) +from distutils.version import LooseVersion +from urllib.parse import quote +from urllib.parse import SplitResult +from urllib.parse import unquote from urllib.parse import urljoin # pylint: disable=unused-import +from urllib.parse import urlparse +from urllib.parse import urlsplit +from urllib.parse import urlunsplit from urllib.request import pathname2url # pylint: disable=unused-import -from tornado.httpclient import AsyncHTTPClient, HTTPClient, HTTPRequest + +from tornado.httpclient import AsyncHTTPClient +from tornado.httpclient import HTTPClient +from tornado.httpclient import HTTPRequest from tornado.netutil import Resolver @@ -28,13 +32,16 @@ def url_path_join(*pieces): Use to prevent double slash when joining subpath. 
This will leave the initial and final / in place """ - initial = pieces[0].startswith('/') - final = pieces[-1].endswith('/') - stripped = [s.strip('/') for s in pieces] - result = '/'.join(s for s in stripped if s) - if initial: result = '/' + result - if final: result = result + '/' - if result == '//': result = '/' + initial = pieces[0].startswith("/") + final = pieces[-1].endswith("/") + stripped = [s.strip("/") for s in pieces] + result = "/".join(s for s in stripped if s) + if initial: + result = "/" + result + if final: + result = result + "/" + if result == "//": + result = "/" return result @@ -45,17 +52,17 @@ def url_is_absolute(url): def path2url(path): """Convert a local file path to a URL""" - pieces = [ quote(p) for p in path.split(os.sep) ] + pieces = [quote(p) for p in path.split(os.sep)] # preserve trailing / - if pieces[-1] == '': - pieces[-1] = '/' + if pieces[-1] == "": + pieces[-1] = "/" url = url_path_join(*pieces) return url def url2path(url): """Convert a URL to a local file path""" - pieces = [ unquote(p) for p in url.split('/') ] + pieces = [unquote(p) for p in url.split("/")] path = os.path.join(*pieces) return path @@ -99,33 +106,32 @@ def samefile_simple(path, other_path): """ path_stat = os.stat(path) other_path_stat = os.stat(other_path) - return (path.lower() == other_path.lower() - and path_stat == other_path_stat) + return path.lower() == other_path.lower() and path_stat == other_path_stat -def to_os_path(path, root=''): +def to_os_path(path, root=""): """Convert an API path to a filesystem path If given, root will be prepended to the path. root must be a filesystem path already. """ - parts = path.strip('/').split('/') - parts = [p for p in parts if p != ''] # remove duplicate splits + parts = path.strip("/").split("/") + parts = [p for p in parts if p != ""] # remove duplicate splits path = os.path.join(root, *parts) return path -def to_api_path(os_path, root=''): +def to_api_path(os_path, root=""): """Convert a filesystem path to an API path If given, root will be removed from the path. root must be a filesystem path already. 
""" if os_path.startswith(root): - os_path = os_path[len(root):] + os_path = os_path[len(root) :] parts = os_path.strip(os.path.sep).split(os.path.sep) - parts = [p for p in parts if p != ''] # remove duplicate splits - path = '/'.join(parts) + parts = [p for p in parts if p != ""] # remove duplicate splits + path = "/".join(parts) return path @@ -144,11 +150,14 @@ def check_version(v, check): # Copy of IPython.utils.process.check_pid: + def _check_pid_win32(pid): import ctypes + # OpenProcess returns 0 if no such process (of ours) exists # positive int otherwise - return bool(ctypes.windll.kernel32.OpenProcess(1,0,pid)) + return bool(ctypes.windll.kernel32.OpenProcess(1, 0, pid)) + def _check_pid_posix(pid): """Copy of IPython.utils.process.check_pid""" @@ -164,7 +173,8 @@ def _check_pid_posix(pid): else: return True -if sys.platform == 'win32': + +if sys.platform == "win32": check_pid = _check_pid_win32 else: check_pid = _check_pid_posix @@ -178,7 +188,7 @@ async def ensure_async(obj): try: result = await obj except RuntimeError as e: - if str(e) == 'cannot reuse already awaited coroutine': + if str(e) == "cannot reuse already awaited coroutine": # obj is already the coroutine's result return obj raise @@ -222,12 +232,13 @@ def wrapped(): try: result = loop.run_until_complete(maybe_async) except RuntimeError as e: - if str(e) == 'This event loop is already running': + if str(e) == "This event loop is already running": # just return a Future, hoping that it will be awaited result = asyncio.ensure_future(maybe_async) else: raise e return result + return wrapped() @@ -256,17 +267,17 @@ async def run_sync_in_loop(maybe_async): def urlencode_unix_socket_path(socket_path): """Encodes a UNIX socket path string from a socket path for the `http+unix` URI form.""" - return socket_path.replace('/', '%2F') + return socket_path.replace("/", "%2F") def urldecode_unix_socket_path(socket_path): """Decodes a UNIX sock path string from an encoded sock path for the `http+unix` URI form.""" - return socket_path.replace('%2F', '/') + return socket_path.replace("%2F", "/") def urlencode_unix_socket(socket_path): """Encodes a UNIX socket URL from a socket path for the `http+unix` URI form.""" - return 'http+unix://%s' % urlencode_unix_socket_path(socket_path) + return "http+unix://%s" % urlencode_unix_socket_path(socket_path) def unix_socket_in_use(socket_path): @@ -286,12 +297,7 @@ def unix_socket_in_use(socket_path): @contextmanager -def _request_for_tornado_client( - urlstring, - method="GET", - body=None, - headers=None -): +def _request_for_tornado_client(urlstring, method="GET", body=None, headers=None): """A utility that provides a context that handles HTTP, HTTPS, and HTTP+UNIX request. Creates a tornado HTTPRequest object with a URL @@ -310,7 +316,7 @@ def _request_for_tornado_client( netloc=parts.netloc, path=parts.path, query=parts.query, - fragment=parts.fragment + fragment=parts.fragment, ) class UnixSocketResolver(Resolver): @@ -320,6 +326,7 @@ class UnixSocketResolver(Resolver): must be `http` (not `http+unix`). Applications should replace the scheme in URLS before making a request to the HTTP client. 
""" + def initialize(self, resolver): self.resolver = resolver @@ -327,9 +334,7 @@ def close(self): self.resolver.close() async def resolve(self, host, port, *args, **kwargs): - return [ - (socket.AF_UNIX, urldecode_unix_socket_path(host)) - ] + return [(socket.AF_UNIX, urldecode_unix_socket_path(host))] resolver = UnixSocketResolver(resolver=Resolver()) AsyncHTTPClient.configure(None, resolver=resolver) @@ -338,21 +343,11 @@ async def resolve(self, host, port, *args, **kwargs): # Yield the request for the given client. url = urlunsplit(parts) - request = HTTPRequest( - url, - method=method, - body=body, - headers=headers - ) + request = HTTPRequest(url, method=method, body=body, headers=headers) yield request -def fetch( - urlstring, - method="GET", - body=None, - headers=None -): +def fetch(urlstring, method="GET", body=None, headers=None): """ Send a HTTP, HTTPS, or HTTP+UNIX request to a Tornado Web Server. Returns a tornado HTTPResponse. @@ -362,13 +357,7 @@ def fetch( return response -async def async_fetch( - urlstring, - method="GET", - body=None, - headers=None, - io_loop=None -): +async def async_fetch(urlstring, method="GET", body=None, headers=None, io_loop=None): """ Send an asynchronous HTTP, HTTPS, or HTTP+UNIX request to a Tornado Web Server. Returns a tornado HTTPResponse. diff --git a/jupyter_server/view/handlers.py b/jupyter_server/view/handlers.py index 76f5a65b29..6ad73a17f1 100644 --- a/jupyter_server/view/handlers.py +++ b/jupyter_server/view/handlers.py @@ -1,27 +1,29 @@ -#encoding: utf-8 +# encoding: utf-8 """Tornado handlers for viewing HTML files.""" - # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - from tornado import web -from ..base.handlers import JupyterHandler, path_regex -from ..utils import ensure_async, url_escape, url_path_join + +from ..base.handlers import JupyterHandler +from ..base.handlers import path_regex +from ..utils import ensure_async +from ..utils import url_escape +from ..utils import url_path_join class ViewHandler(JupyterHandler): """Render HTML files within an iframe.""" + @web.authenticated async def get(self, path): - path = path.strip('/') + path = path.strip("/") if not await ensure_async(self.contents_manager.file_exists(path)): - raise web.HTTPError(404, u'File does not exist: %s' % path) + raise web.HTTPError(404, u"File does not exist: %s" % path) + + basename = path.rsplit("/", 1)[-1] + file_url = url_path_join(self.base_url, "files", url_escape(path)) + self.write(self.render_template("view.html", file_url=file_url, page_title=basename)) - basename = path.rsplit('/', 1)[-1] - file_url = url_path_join(self.base_url, 'files', url_escape(path)) - self.write( - self.render_template('view.html', file_url=file_url, page_title=basename) - ) default_handlers = [ (r"/view%s" % path_regex, ViewHandler), diff --git a/setup.py b/setup.py index e9e09be329..9a33dccc5d 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,8 @@ try: from jupyter_packaging import wrap_installers, npm_builder - ensured_targets = ['jupyter_server/static/style/bootstrap.min.css'] + + ensured_targets = ["jupyter_server/static/style/bootstrap.min.css"] cmdclass = wrap_installers(pre_develop=npm_builder(), ensured_targets=ensured_targets) except ImportError: cmdclass = {}