diff --git a/.babelrc b/.babelrc deleted file mode 100644 index c13c5f627f..0000000000 --- a/.babelrc +++ /dev/null @@ -1,3 +0,0 @@ -{ - "presets": ["es2015"] -} diff --git a/.eslintignore b/.eslintignore deleted file mode 100644 index db228f0817..0000000000 --- a/.eslintignore +++ /dev/null @@ -1,5 +0,0 @@ -*.min.js -*components* -*node_modules* -*built* -*build* diff --git a/.eslintrc.json b/.eslintrc.json deleted file mode 100644 index 3b41a5ad9f..0000000000 --- a/.eslintrc.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "parserOptions": { - "ecmaVersion": 6, - "sourceType": "module" - }, - "rules": { - "semi": 1, - "no-cond-assign": 2, - "no-debugger": 2, - "comma-dangle": 0, - "no-unreachable": 2 - } -} diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..f516e56ded --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,12 @@ +version: 2 +updates: + # GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + # Python + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/check-release.yml b/.github/workflows/check-release.yml deleted file mode 100644 index 785b97189e..0000000000 --- a/.github/workflows/check-release.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Check Release -on: - push: - branches: ["main"] - pull_request: - branches: ["*"] - -jobs: - check_release: - runs-on: ubuntu-latest - strategy: - matrix: - group: [check_release, link_check] - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Base Setup - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - - name: Install Dependencies - run: | - pip install -e . 
- - name: Check Release - if: ${{ matrix.group == 'check_release' }} - uses: jupyter-server/jupyter_releaser/.github/actions/check-release@v1 - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Run Link Check - if: ${{ matrix.group == 'link_check' }} - uses: jupyter-server/jupyter_releaser/.github/actions/check-links@v1 diff --git a/.github/workflows/downstream.yml b/.github/workflows/downstream.yml index 8a617d76dc..8763635490 100644 --- a/.github/workflows/downstream.yml +++ b/.github/workflows/downstream.yml @@ -1,17 +1,17 @@ -name: Test downstream projects +name: Downstream Tests on: push: pull_request: jobs: - tests: + nbclassic: runs-on: ubuntu-latest - timeout-minutes: 20 + timeout-minutes: 10 steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 @@ -20,17 +20,7 @@ jobs: uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: nbclassic - - - name: Test jupyterlab_server - uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 - with: - package_name: jupyterlab_server - - - name: Test jupyterlab - uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 - with: - package_name: jupyterlab - test_command: "python -m jupyterlab.browser_check --no-browser-test" + test_command: pip install pytest-jupyter[server] && pytest -vv -raXxs -W default --durations 10 --color=yes - name: Test run nbclassic run: | @@ -45,3 +35,107 @@ jobs: sleep 5 kill $TASK_PID wait $TASK_PID + + notebook: + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Base Setup + uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + + - name: Test notebook + uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 + with: + package_name: notebook + package_download_extra_args: "--pre" + test_command: pip install pytest-jupyter[server] && pytest 
-vv -raXxs -W default --durations 10 --color=yes + + jupyterlab_server: + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Base Setup + uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + + - run: pip install pytest-jupyter[server] + + - name: Test jupyterlab_server + uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 + with: + package_name: jupyterlab_server + test_command: pip install pytest-jupyter[server] && pytest -vv -raXxs -W default --durations 10 --color=yes + + jupyterlab: + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Base Setup + uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + + - name: Test jupyterlab + uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 + with: + package_name: jupyterlab + test_command: "python -m jupyterlab.browser_check --no-browser-test" + + jupyter_server_terminals: + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Base Setup + uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + + - name: Test jupyter_server_terminals + uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 + with: + test_command: pip install pytest-jupyter[server] && pytest -vv -raXxs -W default --durations 10 --color=yes + package_name: jupyter_server_terminals + + jupytext: + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Base Setup + uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + + - name: Test jupytext + uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 + with: + package_name: jupytext + test_command: pip install pytest-jupyter[server] gitpython pre-commit && python -m ipykernel install --name jupytext-dev --user && pytest -vv -raXxs -W default --durations 10 
--color=yes --ignore=tests/test_doc_files_are_notebooks.py --ignore=tests/test_changelog.py + + downstream_check: # This job does nothing and is only used for the branch protection + if: always() + needs: + - jupyterlab + - jupyter_server_terminals + - jupyterlab_server + - notebook + - nbclassic + - jupytext + runs-on: ubuntu-latest + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + jobs: ${{ toJSON(needs) }} diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml deleted file mode 100644 index bb7e770201..0000000000 --- a/.github/workflows/integration-tests.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: Jupyter Server Integration Tests [Linux] -on: - push: - branches: ["main"] - pull_request: - branches: ["*"] -jobs: - build: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: ["3.7", "3.8", "3.9", "3.10", "pypy-3.7"] - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Base Setup - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - - name: Install the Python dependencies - run: | - pip install -e ".[test]" - pip install pytest-github-actions-annotate-failures - - name: List installed packages - run: | - pip freeze - pip check - - name: Run the tests - run: | - pytest -vv --integration_tests=true tests diff --git a/.github/workflows/prep-release.yml b/.github/workflows/prep-release.yml new file mode 100644 index 0000000000..7a2a18de75 --- /dev/null +++ b/.github/workflows/prep-release.yml @@ -0,0 +1,42 @@ +name: "Step 1: Prep Release" +on: + workflow_dispatch: + inputs: + version_spec: + description: "New Version Specifier" + default: "next" + required: false + branch: + description: "The branch to target" + required: false + post_version_spec: + description: "Post Version Specifier" + required: false + since: + description: "Use PRs with activity since this date or git 
reference" + required: false + since_last_stable: + description: "Use PRs with activity since the last stable git tag" + required: false + type: boolean +jobs: + prep_release: + runs-on: ubuntu-latest + steps: + - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + + - name: Prep Release + id: prep-release + uses: jupyter-server/jupyter_releaser/.github/actions/prep-release@v2 + with: + token: ${{ secrets.ADMIN_GITHUB_TOKEN }} + version_spec: ${{ github.event.inputs.version_spec }} + post_version_spec: ${{ github.event.inputs.post_version_spec }} + target: ${{ github.event.inputs.target }} + branch: ${{ github.event.inputs.branch }} + since: ${{ github.event.inputs.since }} + since_last_stable: ${{ github.event.inputs.since_last_stable }} + + - name: "** Next Step **" + run: | + echo "(Optional): Review Draft Release: ${{ steps.prep-release.outputs.release_url }}" diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml new file mode 100644 index 0000000000..dbaaeaad24 --- /dev/null +++ b/.github/workflows/publish-release.yml @@ -0,0 +1,54 @@ +name: "Step 2: Publish Release" +on: + workflow_dispatch: + inputs: + branch: + description: "The target branch" + required: false + release_url: + description: "The URL of the draft GitHub release" + required: false + steps_to_skip: + description: "Comma separated list of steps to skip" + required: false + +jobs: + publish_release: + runs-on: ubuntu-latest + steps: + - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + + - name: Populate Release + id: populate-release + uses: jupyter-server/jupyter_releaser/.github/actions/populate-release@v2 + with: + token: ${{ secrets.ADMIN_GITHUB_TOKEN }} + target: ${{ github.event.inputs.target }} + branch: ${{ github.event.inputs.branch }} + release_url: ${{ github.event.inputs.release_url }} + steps_to_skip: ${{ github.event.inputs.steps_to_skip }} + + - name: Finalize Release + id: finalize-release + env: + PYPI_TOKEN: ${{ 
secrets.PYPI_TOKEN }} + PYPI_TOKEN_MAP: ${{ secrets.PYPI_TOKEN_MAP }} + TWINE_USERNAME: __token__ + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + uses: jupyter-server/jupyter_releaser/.github/actions/finalize-release@v2 + with: + token: ${{ secrets.ADMIN_GITHUB_TOKEN }} + target: ${{ github.event.inputs.target }} + release_url: ${{ steps.populate-release.outputs.release_url }} + + - name: "** Next Step **" + if: ${{ success() }} + run: | + echo "Verify the final release" + echo ${{ steps.finalize-release.outputs.release_url }} + + - name: "** Failure Message **" + if: ${{ failure() }} + run: | + echo "Failed to Publish the Draft Release Url:" + echo ${{ steps.populate-release.outputs.release_url }} diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml index b57377dd46..7e821d5649 100644 --- a/.github/workflows/python-tests.yml +++ b/.github/workflows/python-tests.yml @@ -6,6 +6,10 @@ on: schedule: - cron: "0 8 * * *" +defaults: + run: + shell: bash -eux {0} + jobs: build: runs-on: ${{ matrix.os }} @@ -14,63 +18,83 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macos-latest] - python-version: ["3.7", "3.10"] + python-version: ["3.8", "3.11"] include: - os: windows-latest python-version: "3.9" - os: ubuntu-latest - python-version: "pypy-3.7" + python-version: "pypy-3.8" - os: macos-latest - python-version: "3.8" + python-version: "3.10" + - os: ubuntu-latest + python-version: "3.12" steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - - name: Install the Python dependencies + - name: Install nbconvert dependencies on Linux + if: startsWith(runner.os, 'Linux') run: | - pip install -e ".[test]" codecov - - name: Run the tests + sudo apt-get update + sudo apt-get install texlive-plain-generic inkscape texlive-xetex + sudo apt-get install xvfb x11-utils libxkbcommon-x11-0 + # pandoc is not up to date in the ubuntu 
repos, so we install directly + wget https://github.com/jgm/pandoc/releases/download/3.1.2/pandoc-3.1.2-1-amd64.deb && sudo dpkg -i pandoc-3.1.2-1-amd64.deb + - name: Run the tests on posix if: ${{ !startsWith(matrix.python-version, 'pypy') && !startsWith(matrix.os, 'windows') }} + run: hatch run cov:test --cov-fail-under 75 || hatch run test:test --lf + - name: Run the tests on pypy + if: ${{ startsWith(matrix.python-version, 'pypy') }} + run: hatch run test:nowarn || hatch run test:nowarn --lf + - name: Run the tests on windows + if: ${{ startsWith(matrix.os, 'windows') }} + run: hatch run cov:nowarn -s || hatch run cov:nowarn --lf + - uses: jupyterlab/maintainer-tools/.github/actions/upload-coverage@v1 + + test_docs: + name: Test Docs + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + - name: Install Dependencies run: | - args="-vv --cov jupyter_server --cov-branch --cov-report term-missing:skip-covered" - python -m pytest $args --cov-fail-under 70 || python -m pytest $args --lf - - name: Run the tests on pypy and windows - if: ${{ startsWith(matrix.python-version, 'pypy') || startsWith(matrix.os, 'windows') }} - run: | - python -m pytest -vv || python -m pytest -vv --lf - - name: Coverage + sudo apt-get update + sudo apt-get install enchant-2 # for spell checking + - name: Build API docs run: | - codecov + hatch run docs:api + # If this fails run `hatch run docs:api` locally + # and commit. 
+ git status --porcelain + git status -s | grep "A" && exit 1 + git status -s | grep "M" && exit 1 + echo "API docs done" + - run: hatch run docs:build - pre-commit: - name: pre-commit + test_lint: + name: Test Lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - - uses: pre-commit/action@v2.0.0 - with: - extra_args: --all-files --hook-stage=manual - - name: Help message if pre-commit fail - if: ${{ failure() }} + - uses: actions/checkout@v4 + - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + - name: Run Linters run: | - echo "You can install pre-commit hooks to automatically run formatting" - echo "on each commit with:" - echo " pre-commit install" - echo "or you can run by hand on staged files with" - echo " pre-commit run" - echo "or after-the-fact on already committed files with" - echo " pre-commit run --all-files --hook-stage=manual" + hatch run typing:test + hatch run lint:build + pipx run interrogate -v . + pipx run doc8 --max-line-length=200 --ignore-path=docs/source/other/full-config.rst + npm install -g eslint + eslint . 
- test_docs_and_examples: - name: Test Docs and Examples + test_examples: + name: Test Examples timeout-minutes: 10 runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Base Setup - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + - uses: actions/checkout@v4 + - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Install the Python dependencies for the examples run: | pip install -e ".[test]" @@ -78,54 +102,39 @@ jobs: - name: Run the tests for the examples run: | python -m pytest examples/simple - - name: Test the docs - run: | - cd docs - pip install -r doc-requirements.txt - make html SPHINXOPTS="-W" test_minimum_versions: name: Test Minimum Versions timeout-minutes: 20 runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Base Setup - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + - uses: actions/checkout@v4 + - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 with: - python_version: "3.7" - - name: Install miniumum versions - uses: jupyterlab/maintainer-tools/.github/actions/install-minimums@v1 + dependency_type: minimum - name: Run the unit tests run: | - pytest -vv -W default || pytest -vv -W default --lf + hatch -vv run test:nowarn || hatch run test:nowarn --lf test_prereleases: name: Test Prereleases runs-on: ubuntu-latest timeout-minutes: 20 steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Base Setup - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - - name: Install the Python dependencies - run: | - pip install --pre -e ".[test]" - - name: List installed packages - run: | - pip freeze - pip check + - uses: actions/checkout@v4 + - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + with: + dependency_type: pre - name: Run the tests run: | - pytest -vv || pytest -vv --lf + hatch run test:nowarn || hatch run test:nowarn --lf make_sdist: name: Make SDist runs-on: ubuntu-latest timeout-minutes: 10 steps: - - 
uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/make-sdist@v1 @@ -137,3 +146,85 @@ jobs: steps: - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/test-sdist@v1 + with: + package_spec: -vv . + test_command: hatch run test:test || hatch run test:test --lf + + check_release: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Base Setup + uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + - name: Install Dependencies + run: | + pip install -e . + - name: Check Release + uses: jupyter-server/jupyter_releaser/.github/actions/check-release@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + check_links: + name: Check Links + runs-on: ubuntu-latest + timeout-minutes: 15 + steps: + - uses: actions/checkout@v4 + - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + - uses: jupyterlab/maintainer-tools/.github/actions/check-links@v1 + + integration_check: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ["3.8", "3.9", "3.10", "3.11"] + steps: + - uses: actions/checkout@v4 + - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + - name: Run the tests + run: hatch run cov:integration + - uses: jupyterlab/maintainer-tools/.github/actions/upload-coverage@v1 + + integration_check_pypy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + with: + python_version: "pypy-3.8" + - name: Run the tests + run: hatch run test:nowarn --integration_tests=true + + coverage: + runs-on: ubuntu-latest + needs: + - integration_check + - build + steps: + - uses: actions/checkout@v4 + - uses: jupyterlab/maintainer-tools/.github/actions/report-coverage@v1 + with: + fail_under: 80 + + 
tests_check: # This job does nothing and is only used for the branch protection + if: always() + needs: + - coverage + - integration_check_pypy + - test_docs + - test_lint + - test_examples + - test_minimum_versions + - test_prereleases + - check_links + - check_release + - test_sdist + runs-on: ubuntu-latest + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + jobs: ${{ toJSON(needs) }} diff --git a/.gitignore b/.gitignore index 8d26500c3b..04ef5c46fa 100644 --- a/.gitignore +++ b/.gitignore @@ -35,6 +35,9 @@ config.rst # copied changelog file docs/source/other/changelog.md +# full config is generated on demand +docs/source/other/full-config.rst + # jetbrains ide stuff *.iml .idea/ @@ -44,3 +47,6 @@ docs/source/other/changelog.md .history .vscode/* !.vscode/*.template + +# Compiled static file in example. +examples/simple/simple_ext1/static/bundle.js diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1f2bd51044..ff6b9662b6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,75 +1,82 @@ +ci: + autoupdate_schedule: monthly + autoupdate_commit_msg: "chore: update pre-commit hooks" + repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.2.0 + rev: v4.5.0 hooks: - - id: end-of-file-fixer - id: check-case-conflict + - id: check-ast + - id: check-docstring-first - id: check-executables-have-shebangs - - id: requirements-txt-fixer - id: check-added-large-files - id: check-case-conflict + - id: check-merge-conflict + - id: check-json - id: check-toml - id: check-yaml - id: debug-statements - - id: forbid-new-submodules - - id: check-builtin-literals + - id: end-of-file-fixer - id: trailing-whitespace - - repo: https://github.com/psf/black - rev: 22.3.0 + - repo: https://github.com/python-jsonschema/check-jsonschema + rev: 0.27.1 hooks: - - id: black - args: ["--line-length", "100"] + - id: 
check-github-workflows - - repo: https://github.com/PyCQA/isort - rev: 5.10.1 + - repo: https://github.com/executablebooks/mdformat + rev: 0.7.17 hooks: - - id: isort - files: \.py$ - args: [--profile=black] + - id: mdformat + additional_dependencies: + [mdformat-gfm, mdformat-frontmatter, mdformat-footnote] - repo: https://github.com/pre-commit/mirrors-prettier - rev: v2.6.2 + rev: "v3.1.0" hooks: - id: prettier + types_or: [yaml, html, json] - - repo: https://github.com/asottile/pyupgrade - rev: v2.32.0 + - repo: https://github.com/adamchainz/blacken-docs + rev: "1.16.0" hooks: - - id: pyupgrade - args: [--py37-plus] + - id: blacken-docs + additional_dependencies: [black==23.7.0] - - repo: https://github.com/PyCQA/doc8 - rev: 0.11.1 + - repo: https://github.com/codespell-project/codespell + rev: "v2.2.6" hooks: - - id: doc8 - args: [--max-line-length=200] - stages: [manual] + - id: codespell + args: ["-L", "sur,nd"] - - repo: https://github.com/pycqa/flake8 - rev: 4.0.1 + - repo: https://github.com/pre-commit/pygrep-hooks + rev: "v1.10.0" hooks: - - id: flake8 - additional_dependencies: - [ - "flake8-bugbear==20.1.4", - "flake8-logging-format==0.6.0", - "flake8-implicit-str-concat==0.2.0", - ] - stages: [manual] + - id: rst-backticks + - id: rst-directive-colons + - id: rst-inline-touching-normal - - repo: https://github.com/pre-commit/mirrors-eslint - rev: v8.13.0 + - repo: https://github.com/pre-commit/mirrors-mypy + rev: "v1.7.0" hooks: - - id: eslint + - id: mypy + files: jupyter_server stages: [manual] + additional_dependencies: + ["traitlets>=5.13", "jupyter_core>=5.5", "jupyter_client>=8.5"] - - repo: https://github.com/sirosen/check-jsonschema - rev: 0.14.2 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.6 hooks: - - id: check-jsonschema - name: "Check 
GitHub Workflows" - files: ^\.github/workflows/ - types: [yaml] - args: ["--schemafile", "https://json.schemastore.org/github-workflow"] - stages: [manual] + - id: ruff + types_or: [python, jupyter] + args: ["--fix", "--show-fixes"] + - id: ruff-format + types_or: [python, jupyter] + + - repo: https://github.com/scientific-python/cookie + rev: "2023.11.17" + hooks: + - id: sp-repo-review + additional_dependencies: ["repo-review[cli]"] diff --git a/readthedocs.yml b/.readthedocs.yaml similarity index 60% rename from readthedocs.yml rename to .readthedocs.yaml index 011118fd63..208e5c37e2 100644 --- a/readthedocs.yml +++ b/.readthedocs.yaml @@ -1,11 +1,14 @@ version: 2 +build: + os: ubuntu-22.04 + tools: + python: "3.9" sphinx: configuration: docs/source/conf.py -conda: - environment: docs/environment.yml python: - version: 3.8 install: # install itself with pip install . - method: pip path: . + extra_requirements: + - docs diff --git a/CHANGELOG.md b/CHANGELOG.md index 778008e05c..26f3eca1cc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,1190 @@ All notable changes to this project will be documented in this file. 
+## 2.11.2 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.11.1)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-11-27&to=2023-12-04&type=c)) + + + +## 2.11.1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.11.0...40a95e5f39d3f167bebf9232da9fab64818ba97d)) + +### Bugs fixed + +- avoid unhandled error on some invalid paths [#1369](https://github.com/jupyter-server/jupyter_server/pull/1369) ([@minrk](https://github.com/minrk)) +- Change md5 to hash and hash_algorithm, fix incompatibility [#1367](https://github.com/jupyter-server/jupyter_server/pull/1367) ([@Wh1isper](https://github.com/Wh1isper)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-11-21&to=2023-11-27&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-11-21..2023-11-27&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Afcollonval+updated%3A2023-11-21..2023-11-27&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-11-21..2023-11-27&type=Issues) | [@Wh1isper](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AWh1isper+updated%3A2023-11-21..2023-11-27&type=Issues) + +## 2.11.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.10.1...e7c0f331d4cbf82eb1a9e9bc6c260faabda0255a)) + +### Enhancements made + +- Support get file(notebook) md5 
[#1363](https://github.com/jupyter-server/jupyter_server/pull/1363) ([@Wh1isper](https://github.com/Wh1isper)) + +### Maintenance and upkeep improvements + +- Update ruff and typings [#1365](https://github.com/jupyter-server/jupyter_server/pull/1365) ([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- Update api docs with md5 param [#1364](https://github.com/jupyter-server/jupyter_server/pull/1364) ([@Wh1isper](https://github.com/Wh1isper)) +- typo: ServerApp [#1361](https://github.com/jupyter-server/jupyter_server/pull/1361) ([@IITII](https://github.com/IITII)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-11-15&to=2023-11-21&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-11-15..2023-11-21&type=Issues) | [@IITII](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AIITII+updated%3A2023-11-15..2023-11-21&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-11-15..2023-11-21&type=Issues) | [@Wh1isper](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AWh1isper+updated%3A2023-11-15..2023-11-21&type=Issues) + +## 2.10.1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.10.0...9f8ff2886903a6744c5eb483f9e5bd7e63d5d015)) + +### Bugs fixed + +- ContentsHandler return 404 rather than raise exc [#1357](https://github.com/jupyter-server/jupyter_server/pull/1357) ([@bloomsa](https://github.com/bloomsa)) + +### Maintenance and upkeep improvements + +- Clean up ruff config 
[#1358](https://github.com/jupyter-server/jupyter_server/pull/1358) ([@blink1073](https://github.com/blink1073)) +- Add more typings [#1356](https://github.com/jupyter-server/jupyter_server/pull/1356) ([@blink1073](https://github.com/blink1073)) +- chore: update pre-commit hooks [#1355](https://github.com/jupyter-server/jupyter_server/pull/1355) ([@pre-commit-ci](https://github.com/pre-commit-ci)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-11-06&to=2023-11-15&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-11-06..2023-11-15&type=Issues) | [@bloomsa](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Abloomsa+updated%3A2023-11-06..2023-11-15&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2023-11-06..2023-11-15&type=Issues) + +## 2.10.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.9.1...e71e95884483c7ce2d9fd5ee83059a0269741aa1)) + +### Enhancements made + +- Update kernel env to reflect changes in session [#1354](https://github.com/jupyter-server/jupyter_server/pull/1354) ([@blink1073](https://github.com/blink1073)) + +### Maintenance and upkeep improvements + +- Clean up config and address warnings [#1353](https://github.com/jupyter-server/jupyter_server/pull/1353) ([@blink1073](https://github.com/blink1073)) +- Clean up lint and typing [#1351](https://github.com/jupyter-server/jupyter_server/pull/1351) ([@blink1073](https://github.com/blink1073)) +- Update typing for traitlets 5.13 
[#1350](https://github.com/jupyter-server/jupyter_server/pull/1350) ([@blink1073](https://github.com/blink1073)) +- Update typings and fix tests [#1344](https://github.com/jupyter-server/jupyter_server/pull/1344) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-10-25&to=2023-11-06&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-10-25..2023-11-06&type=Issues) + +## 2.9.1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.9.0...bb293ec5cac5b277259f27e458da60fa8a926f46)) + +### Bugs fixed + +- Revert "Update kernel env to reflect changes in session." [#1346](https://github.com/jupyter-server/jupyter_server/pull/1346) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-10-25&to=2023-10-25&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-10-25..2023-10-25&type=Issues) + +## 2.9.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.8.0...3438ddb16575155e98fc4f49700fff420088c8b0)) + +### Enhancements made + +- Ability to configure cull_idle_timeout with kernelSpec [#1342](https://github.com/jupyter-server/jupyter_server/pull/1342) ([@akshaychitneni](https://github.com/akshaychitneni)) +- Update kernel env to reflect changes in session. 
[#1341](https://github.com/jupyter-server/jupyter_server/pull/1341) ([@Carreau](https://github.com/Carreau)) + +### Bugs fixed + +- Run Gateway token renewers even if the auth token is empty. [#1340](https://github.com/jupyter-server/jupyter_server/pull/1340) ([@ojarjur](https://github.com/ojarjur)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-10-16&to=2023-10-25&type=c)) + +[@akshaychitneni](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aakshaychitneni+updated%3A2023-10-16..2023-10-25&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2023-10-16..2023-10-25&type=Issues) | [@ojarjur](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aojarjur+updated%3A2023-10-16..2023-10-25&type=Issues) + +## 2.8.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.7.3...a984e0771da5db4a14e9ac86a392ad3592b863e5)) + +### Enhancements made + +- Added Logs for get_os_path closes issue [#1336](https://github.com/jupyter-server/jupyter_server/pull/1336) ([@jayeshsingh9767](https://github.com/jayeshsingh9767)) + +### Bugs fixed + +- Avoid showing "No answer for 5s" when shutdown is slow [#1320](https://github.com/jupyter-server/jupyter_server/pull/1320) ([@minrk](https://github.com/minrk)) + +### Maintenance and upkeep improvements + +- Update typings for mypy 1.6 [#1337](https://github.com/jupyter-server/jupyter_server/pull/1337) ([@blink1073](https://github.com/blink1073)) +- chore: update pre-commit hooks [#1334](https://github.com/jupyter-server/jupyter_server/pull/1334) 
([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Add typings to commonly used APIs [#1333](https://github.com/jupyter-server/jupyter_server/pull/1333) ([@blink1073](https://github.com/blink1073)) +- Update typings for traitlets 5.10 [#1330](https://github.com/jupyter-server/jupyter_server/pull/1330) ([@blink1073](https://github.com/blink1073)) +- Adopt sp-repo-review [#1324](https://github.com/jupyter-server/jupyter_server/pull/1324) ([@blink1073](https://github.com/blink1073)) +- Bump actions/checkout from 3 to 4 [#1321](https://github.com/jupyter-server/jupyter_server/pull/1321) ([@dependabot](https://github.com/dependabot)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-08-31&to=2023-10-16&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-08-31..2023-10-16&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adependabot+updated%3A2023-08-31..2023-10-16&type=Issues) | [@jayeshsingh9767](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajayeshsingh9767+updated%3A2023-08-31..2023-10-16&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-08-31..2023-10-16&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2023-08-31..2023-10-16&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-08-31..2023-10-16&type=Issues) + +## 2.7.3 + +([Full 
Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.7.2...e72bf7187e396605f46ba59567543ef6386e8920)) + +### New features added + +- Support external kernels [#1305](https://github.com/jupyter-server/jupyter_server/pull/1305) ([@davidbrochart](https://github.com/davidbrochart)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-08-18&to=2023-08-31&type=c)) + +[@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2023-08-18..2023-08-31&type=Issues) + +## 2.7.1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.7.0...d8f4856c32b895106eac58c9c5768afd0e2f6465)) + +### Bugs fixed + +- accessing API version should not count as activity [#1315](https://github.com/jupyter-server/jupyter_server/pull/1315) ([@minrk](https://github.com/minrk)) +- Make kernel_id as a conditional optional field [#1300](https://github.com/jupyter-server/jupyter_server/pull/1300) ([@allstrive](https://github.com/allstrive)) +- Reference current_user to detect auth [#1294](https://github.com/jupyter-server/jupyter_server/pull/1294) ([@bhperry](https://github.com/bhperry)) + +### Maintenance and upkeep improvements + +- send2trash now supports deleting from different filesystem type(#1290) [#1291](https://github.com/jupyter-server/jupyter_server/pull/1291) ([@wqj97](https://github.com/wqj97)) + +### Documentation improvements + +- Add root `/api/` endpoint to REST spec [#1312](https://github.com/jupyter-server/jupyter_server/pull/1312) ([@minrk](https://github.com/minrk)) +- Fix broken link in doc 
[#1307](https://github.com/jupyter-server/jupyter_server/pull/1307) ([@Hind-M](https://github.com/Hind-M)) +- Rename notebook.auth.security.passwd->jupyter_server.auth.passwd in docs [#1306](https://github.com/jupyter-server/jupyter_server/pull/1306) ([@mathbunnyru](https://github.com/mathbunnyru)) +- Update notes link [#1298](https://github.com/jupyter-server/jupyter_server/pull/1298) ([@krassowski](https://github.com/krassowski)) +- docs: fix broken hyperlink to Tornado [#1297](https://github.com/jupyter-server/jupyter_server/pull/1297) ([@emmanuel-ferdman](https://github.com/emmanuel-ferdman)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-06-27&to=2023-08-15&type=c)) + +[@allstrive](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aallstrive+updated%3A2023-06-27..2023-08-15&type=Issues) | [@bhperry](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Abhperry+updated%3A2023-06-27..2023-08-15&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-06-27..2023-08-15&type=Issues) | [@emmanuel-ferdman](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aemmanuel-ferdman+updated%3A2023-06-27..2023-08-15&type=Issues) | [@Hind-M](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AHind-M+updated%3A2023-06-27..2023-08-15&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2023-06-27..2023-08-15&type=Issues) | 
[@krassowski](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akrassowski+updated%3A2023-06-27..2023-08-15&type=Issues) | [@mathbunnyru](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amathbunnyru+updated%3A2023-06-27..2023-08-15&type=Issues) | [@matthewwiese](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amatthewwiese+updated%3A2023-06-27..2023-08-15&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-06-27..2023-08-15&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2023-06-27..2023-08-15&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-06-27..2023-08-15&type=Issues) | [@wqj97](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awqj97+updated%3A2023-06-27..2023-08-15&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2023-06-27..2023-08-15&type=Issues) + +## 2.7.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.6.0...b652f8d08530bd60ecf4cfffe6c32939fd94eb41)) + +### Bugs fixed + +- Add missing events to gateway client [#1288](https://github.com/jupyter-server/jupyter_server/pull/1288) ([@allstrive](https://github.com/allstrive)) + +### Maintenance and upkeep improvements + +- Handle test failures [#1289](https://github.com/jupyter-server/jupyter_server/pull/1289) ([@blink1073](https://github.com/blink1073)) +- Try testing against python 3.12 [#1282](https://github.com/jupyter-server/jupyter_server/pull/1282) 
([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- Remove frontend doc [#1292](https://github.com/jupyter-server/jupyter_server/pull/1292) ([@fcollonval](https://github.com/fcollonval)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-05-25&to=2023-06-27&type=c)) + +[@allstrive](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aallstrive+updated%3A2023-05-25..2023-06-27&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-05-25..2023-06-27&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Afcollonval+updated%3A2023-05-25..2023-06-27&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2023-05-25..2023-06-27&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-05-25..2023-06-27&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2023-05-25..2023-06-27&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-05-25..2023-06-27&type=Issues) + +## 2.6.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.5.0...35b8e9cb68eec48fe9a017ac128cb776c2ead195)) + +### New features added + +- Emit events from the kernels service and gateway client [#1252](https://github.com/jupyter-server/jupyter_server/pull/1252) ([@rajmusuku](https://github.com/rajmusuku)) + 
+### Enhancements made + +- Allows immutable cache for static files in a directory [#1268](https://github.com/jupyter-server/jupyter_server/pull/1268) ([@brichet](https://github.com/brichet)) +- Merge the gateway handlers into the standard handlers. [#1261](https://github.com/jupyter-server/jupyter_server/pull/1261) ([@ojarjur](https://github.com/ojarjur)) +- Gateway manager retry kernel updates [#1256](https://github.com/jupyter-server/jupyter_server/pull/1256) ([@ojarjur](https://github.com/ojarjur)) +- Use debug-level messages for generating anonymous users [#1254](https://github.com/jupyter-server/jupyter_server/pull/1254) ([@hbcarlos](https://github.com/hbcarlos)) +- Define a CURRENT_JUPYTER_HANDLER context var [#1251](https://github.com/jupyter-server/jupyter_server/pull/1251) ([@Zsailer](https://github.com/Zsailer)) + +### Bugs fixed + +- Don't instantiate an unused Future in gateway connection trait [#1276](https://github.com/jupyter-server/jupyter_server/pull/1276) ([@minrk](https://github.com/minrk)) +- Write server list to stdout [#1275](https://github.com/jupyter-server/jupyter_server/pull/1275) ([@minrk](https://github.com/minrk)) +- Make the kernel_websocket_protocol flag reusable. 
[#1264](https://github.com/jupyter-server/jupyter_server/pull/1264) ([@ojarjur](https://github.com/ojarjur)) +- Register websocket handler from same module as kernel handlers [#1249](https://github.com/jupyter-server/jupyter_server/pull/1249) ([@kevin-bates](https://github.com/kevin-bates)) +- Re-enable websocket ping/pong from the server [#1243](https://github.com/jupyter-server/jupyter_server/pull/1243) ([@Zsailer](https://github.com/Zsailer)) +- Fix italics in operators security sections [#1242](https://github.com/jupyter-server/jupyter_server/pull/1242) ([@kevin-bates](https://github.com/kevin-bates)) +- Fix calculation of schema location [#1239](https://github.com/jupyter-server/jupyter_server/pull/1239) ([@lresende](https://github.com/lresende)) + +### Maintenance and upkeep improvements + +- Fix DeprecationWarning from pytest-console-scripts [#1281](https://github.com/jupyter-server/jupyter_server/pull/1281) ([@frenzymadness](https://github.com/frenzymadness)) +- Remove docutils and mistune pins [#1278](https://github.com/jupyter-server/jupyter_server/pull/1278) ([@blink1073](https://github.com/blink1073)) +- Update docutils requirement from \<0.20 to \<0.21 [#1277](https://github.com/jupyter-server/jupyter_server/pull/1277) ([@dependabot](https://github.com/dependabot)) +- Use Python 3.9 for the readthedocs builds [#1269](https://github.com/jupyter-server/jupyter_server/pull/1269) ([@ojarjur](https://github.com/ojarjur)) +- Fix coverage handling [#1257](https://github.com/jupyter-server/jupyter_server/pull/1257) ([@blink1073](https://github.com/blink1073)) +- chore: delete `.gitmodules` [#1248](https://github.com/jupyter-server/jupyter_server/pull/1248) 
([@SauravMaheshkar](https://github.com/SauravMaheshkar)) +- chore: move `babel` and `eslint` configuration under `package.json` [#1246](https://github.com/jupyter-server/jupyter_server/pull/1246) ([@SauravMaheshkar](https://github.com/SauravMaheshkar)) + +### Documentation improvements + +- Fix typo in docs [#1270](https://github.com/jupyter-server/jupyter_server/pull/1270) ([@davidbrochart](https://github.com/davidbrochart)) +- Fix typo [#1262](https://github.com/jupyter-server/jupyter_server/pull/1262) ([@davidbrochart](https://github.com/davidbrochart)) +- Extends the IP documentation [#1258](https://github.com/jupyter-server/jupyter_server/pull/1258) ([@hbcarlos](https://github.com/hbcarlos)) +- Fix italics in operators security sections [#1242](https://github.com/jupyter-server/jupyter_server/pull/1242) ([@kevin-bates](https://github.com/kevin-bates)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-03-16&to=2023-05-25&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-03-16..2023-05-25&type=Issues) | [@brichet](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Abrichet+updated%3A2023-03-16..2023-05-25&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2023-03-16..2023-05-25&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2023-03-16..2023-05-25&type=Issues) | 
[@dependabot](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adependabot+updated%3A2023-03-16..2023-05-25&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2023-03-16..2023-05-25&type=Issues) | [@frenzymadness](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Afrenzymadness+updated%3A2023-03-16..2023-05-25&type=Issues) | [@hbcarlos](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ahbcarlos+updated%3A2023-03-16..2023-05-25&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2023-03-16..2023-05-25&type=Issues) | [@lresende](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Alresende+updated%3A2023-03-16..2023-05-25&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-03-16..2023-05-25&type=Issues) | [@ojarjur](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aojarjur+updated%3A2023-03-16..2023-05-25&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2023-03-16..2023-05-25&type=Issues) | [@rajmusuku](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Arajmusuku+updated%3A2023-03-16..2023-05-25&type=Issues) | [@SauravMaheshkar](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ASauravMaheshkar+updated%3A2023-03-16..2023-05-25&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-03-16..2023-05-25&type=Issues) | 
[@yuvipanda](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ayuvipanda+updated%3A2023-03-16..2023-05-25&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2023-03-16..2023-05-25&type=Issues) + +## 2.5.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.4.0...dc1eee8715dfe674560789caa5123dc895717ca1)) + +### Enhancements made + +- Enable KernelSpecResourceHandler to be async [#1236](https://github.com/jupyter-server/jupyter_server/pull/1236) ([@Zsailer](https://github.com/Zsailer)) +- Added error propagation to gateway_request function [#1233](https://github.com/jupyter-server/jupyter_server/pull/1233) ([@broden-wanner](https://github.com/broden-wanner)) + +### Maintenance and upkeep improvements + +- Update ruff [#1230](https://github.com/jupyter-server/jupyter_server/pull/1230) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-03-06&to=2023-03-16&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-03-06..2023-03-16&type=Issues) | [@broden-wanner](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Abroden-wanner+updated%3A2023-03-06..2023-03-16&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2023-03-06..2023-03-16&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-03-06..2023-03-16&type=Issues) | 
[@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2023-03-06..2023-03-16&type=Issues) + +## 2.4.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.3.0...4d311b2c91e055e7b4690d8100f7fe85381f06da)) + +### Enhancements made + +- Skip dir size check if not enumerable [#1227](https://github.com/jupyter-server/jupyter_server/pull/1227) ([@vidartf](https://github.com/vidartf)) +- Optimize hidden checks [#1226](https://github.com/jupyter-server/jupyter_server/pull/1226) ([@vidartf](https://github.com/vidartf)) +- Enable users to copy both files and directories [#1190](https://github.com/jupyter-server/jupyter_server/pull/1190) ([@kenyaachon](https://github.com/kenyaachon)) + +### Bugs fixed + +- Fix port selection [#1229](https://github.com/jupyter-server/jupyter_server/pull/1229) ([@blink1073](https://github.com/blink1073)) +- Fix priority of deprecated NotebookApp.notebook_dir behind ServerApp.root_dir [#1223](https://github.com/jupyter-server/jupyter_server/pull/1223) ([@minrk](https://github.com/minrk)) +- Ensure content-type properly reflects gateway kernelspec resources [#1219](https://github.com/jupyter-server/jupyter_server/pull/1219) ([@kevin-bates](https://github.com/kevin-bates)) + +### Maintenance and upkeep improvements + +- fix docs build [#1225](https://github.com/jupyter-server/jupyter_server/pull/1225) ([@blink1073](https://github.com/blink1073)) +- Fix ci failures [#1222](https://github.com/jupyter-server/jupyter_server/pull/1222) ([@blink1073](https://github.com/blink1073)) +- Clean up license [#1218](https://github.com/jupyter-server/jupyter_server/pull/1218) 
([@dcsaba89](https://github.com/dcsaba89)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-02-15&to=2023-03-06&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-02-15..2023-03-06&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2023-02-15..2023-03-06&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2023-02-15..2023-03-06&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2023-02-15..2023-03-06&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2023-02-15..2023-03-06&type=Issues) | [@dcsaba89](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adcsaba89+updated%3A2023-02-15..2023-03-06&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2023-02-15..2023-03-06&type=Issues) | [@kenyaachon](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akenyaachon+updated%3A2023-02-15..2023-03-06&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2023-02-15..2023-03-06&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-02-15..2023-03-06&type=Issues) | 
[@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2023-02-15..2023-03-06&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-02-15..2023-03-06&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2023-02-15..2023-03-06&type=Issues) + +## 2.3.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.2.1...968c56c8c69aa545f7fe93243331fe140dac7c90)) + +### Enhancements made + +- Support IPV6 in \_find_http_port() [#1207](https://github.com/jupyter-server/jupyter_server/pull/1207) ([@schnell18](https://github.com/schnell18)) + +### Bugs fixed + +- Redact tokens, etc. in url parameters from request logs [#1212](https://github.com/jupyter-server/jupyter_server/pull/1212) ([@minrk](https://github.com/minrk)) +- Fix get_loader returning None when load_jupyter_server_extension is not found [#1193](https://github.com/jupyter-server/jupyter_server/pull/1193) ([@cmd-ntrf](https://github.com/cmd-ntrf)) + +### Maintenance and upkeep improvements + +- update LICENSE [#1197](https://github.com/jupyter-server/jupyter_server/pull/1197) ([@dcsaba89](https://github.com/dcsaba89)) +- Add license 2 [#1196](https://github.com/jupyter-server/jupyter_server/pull/1196) ([@dcsaba89](https://github.com/dcsaba89)) + +### Documentation improvements + +- Update jupyterhub security link [#1200](https://github.com/jupyter-server/jupyter_server/pull/1200) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + 
+([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-02-02&to=2023-02-15&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-02-02..2023-02-15&type=Issues) | [@cmd-ntrf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acmd-ntrf+updated%3A2023-02-02..2023-02-15&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2023-02-02..2023-02-15&type=Issues) | [@dcsaba89](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adcsaba89+updated%3A2023-02-02..2023-02-15&type=Issues) | [@meeseeksdev](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ameeseeksdev+updated%3A2023-02-02..2023-02-15&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-02-02..2023-02-15&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2023-02-02..2023-02-15&type=Issues) | [@schnell18](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aschnell18+updated%3A2023-02-02..2023-02-15&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-02-02..2023-02-15&type=Issues) + +## 2.2.1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.2.0...0f9556b48d7699bd2d246222067b1cb215d44c28)) + +### Maintenance and upkeep improvements + +- Delete the extra "or" in front of the second url [#1194](https://github.com/jupyter-server/jupyter_server/pull/1194) 
([@jonnygrout](https://github.com/jonnygrout)) +- remove upper bound on anyio [#1192](https://github.com/jupyter-server/jupyter_server/pull/1192) ([@minrk](https://github.com/minrk)) +- Adopt more lint rules [#1189](https://github.com/jupyter-server/jupyter_server/pull/1189) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-01-31&to=2023-02-02&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-01-31..2023-02-02&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2023-01-31..2023-02-02&type=Issues) | [@jonnygrout](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajonnygrout+updated%3A2023-01-31..2023-02-02&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-01-31..2023-02-02&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-01-31..2023-02-02&type=Issues) + +## 2.2.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.1.0...b6c1edb0b205f8d53f1a2e81abb997bfc693f144)) + +### Enhancements made + +- Only load enabled extension packages [#1180](https://github.com/jupyter-server/jupyter_server/pull/1180) ([@minrk](https://github.com/minrk)) +- Pass in a logger to get_metadata [#1176](https://github.com/jupyter-server/jupyter_server/pull/1176) ([@yuvipanda](https://github.com/yuvipanda)) + +### Bugs fixed + +- Don't assume that resources entries are 
relative [#1182](https://github.com/jupyter-server/jupyter_server/pull/1182) ([@ojarjur](https://github.com/ojarjur)) + +### Maintenance and upkeep improvements + +- Updates for client 8 [#1188](https://github.com/jupyter-server/jupyter_server/pull/1188) ([@blink1073](https://github.com/blink1073)) +- Use repr in logging for exception. [#1185](https://github.com/jupyter-server/jupyter_server/pull/1185) ([@Carreau](https://github.com/Carreau)) +- Update example npm deps [#1184](https://github.com/jupyter-server/jupyter_server/pull/1184) ([@blink1073](https://github.com/blink1073)) +- Fix docs and examples [#1183](https://github.com/jupyter-server/jupyter_server/pull/1183) ([@blink1073](https://github.com/blink1073)) +- Update jupyter client api docs links [#1179](https://github.com/jupyter-server/jupyter_server/pull/1179) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-01-13&to=2023-01-31&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-01-13..2023-01-31&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2023-01-13..2023-01-31&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2023-01-13..2023-01-31&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2023-01-13..2023-01-31&type=Issues) | 
[@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-01-13..2023-01-31&type=Issues) | [@ojarjur](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aojarjur+updated%3A2023-01-13..2023-01-31&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-01-13..2023-01-31&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ayuvipanda+updated%3A2023-01-13..2023-01-31&type=Issues) + +## 2.1.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.7...34f509d8da1710039634bc16f5336570c4861bcd)) + +### Bugs fixed + +- Fix preferred_dir for sync contents manager [#1173](https://github.com/jupyter-server/jupyter_server/pull/1173) ([@vidartf](https://github.com/vidartf)) + +### Maintenance and upkeep improvements + +- Update typing and warning handling [#1174](https://github.com/jupyter-server/jupyter_server/pull/1174) ([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- Add api docs [#1159](https://github.com/jupyter-server/jupyter_server/pull/1159) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-01-12&to=2023-01-12&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-01-12..2023-01-12&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2023-01-12..2023-01-12&type=Issues) | 
[@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2023-01-12..2023-01-12&type=Issues) + +## 2.0.7 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.6...5cce2afcbeeb44581e9b29ab27fef75a12d651ca)) + +### Enhancements made + +- Log how long each extension module takes to import [#1171](https://github.com/jupyter-server/jupyter_server/pull/1171) ([@yuvipanda](https://github.com/yuvipanda)) +- Set JPY_SESSION_NAME to full notebook path. [#1100](https://github.com/jupyter-server/jupyter_server/pull/1100) ([@Carreau](https://github.com/Carreau)) + +### Bugs fixed + +- Reapply preferred_dir fix, now with better backwards compatibility [#1162](https://github.com/jupyter-server/jupyter_server/pull/1162) ([@vidartf](https://github.com/vidartf)) + +### Maintenance and upkeep improvements + +- Update example to use hatch [#1169](https://github.com/jupyter-server/jupyter_server/pull/1169) ([@blink1073](https://github.com/blink1073)) +- Clean up docs build and typing [#1168](https://github.com/jupyter-server/jupyter_server/pull/1168) ([@blink1073](https://github.com/blink1073)) +- Fix check release by ignoring duplicate file name in wheel [#1163](https://github.com/jupyter-server/jupyter_server/pull/1163) ([@blink1073](https://github.com/blink1073)) +- Fix broken link in warning message [#1158](https://github.com/jupyter-server/jupyter_server/pull/1158) ([@consideRatio](https://github.com/consideRatio)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-12-29&to=2023-01-12&type=c)) + 
+[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-12-29..2023-01-12&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2022-12-29..2023-01-12&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2022-12-29..2023-01-12&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AconsideRatio+updated%3A2022-12-29..2023-01-12&type=Issues) | [@meeseeksdev](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ameeseeksdev+updated%3A2022-12-29..2023-01-12&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-12-29..2023-01-12&type=Issues) | [@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2022-12-29..2023-01-12&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-12-29..2023-01-12&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ayuvipanda+updated%3A2022-12-29..2023-01-12&type=Issues) + +## 2.0.6 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.5...73d577610cda544e85139842c4186f7e77197440)) + +### Bugs fixed + +- Iterate through set of apps in `extension_manager.any_activity` method [#1157](https://github.com/jupyter-server/jupyter_server/pull/1157) ([@mahendrapaipuri](https://github.com/mahendrapaipuri)) + +### Maintenance and upkeep improvements + +- Handle flake8-errmsg 
[#1155](https://github.com/jupyter-server/jupyter_server/pull/1155) ([@blink1073](https://github.com/blink1073)) +- Add spelling and docstring enforcement [#1147](https://github.com/jupyter-server/jupyter_server/pull/1147) ([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- Add spelling and docstring enforcement [#1147](https://github.com/jupyter-server/jupyter_server/pull/1147) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-12-23&to=2022-12-29&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-12-23..2022-12-29&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2022-12-23..2022-12-29&type=Issues) | [@mahendrapaipuri](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amahendrapaipuri+updated%3A2022-12-23..2022-12-29&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-12-23..2022-12-29&type=Issues) + +## 2.0.5 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.4...ec9029f07fe377ebb86b77e0eadd159fc9288c98)) + +### Bugs fixed + +- Remove `end` kwarg after migration from print to info [#1151](https://github.com/jupyter-server/jupyter_server/pull/1151) ([@krassowski](https://github.com/krassowski)) + +### Maintenance and upkeep improvements + +- Import ensure-sync directly from dependence. 
[#1149](https://github.com/jupyter-server/jupyter_server/pull/1149) ([@Carreau](https://github.com/Carreau)) +- Update deprecation warning [#1148](https://github.com/jupyter-server/jupyter_server/pull/1148) ([@Carreau](https://github.com/Carreau)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-12-21&to=2022-12-23&type=c)) + +[@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2022-12-21..2022-12-23&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2022-12-21..2022-12-23&type=Issues) | [@krassowski](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akrassowski+updated%3A2022-12-21..2022-12-23&type=Issues) + +## 2.0.4 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.3...53377e25efe0faf4e2a984254ca2c301aeea096d)) + +### Bugs fixed + +- Fix handling of extension last activity [#1145](https://github.com/jupyter-server/jupyter_server/pull/1145) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-12-21&to=2022-12-21&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-12-21..2022-12-21&type=Issues) + +## 2.0.3 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.2...e35fbbc238a5b96d869c574fe8b8eb27b9605a05)) + +### Bugs fixed + +- Restore default writing of browser open redirect file, add opt-in to skip 
[#1144](https://github.com/jupyter-server/jupyter_server/pull/1144) ([@bollwyvl](https://github.com/bollwyvl)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-12-20&to=2022-12-21&type=c)) + +[@bollwyvl](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Abollwyvl+updated%3A2022-12-20..2022-12-21&type=Issues) + +## 2.0.2 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.1...b5b7c5e9141698ab0206f74b8944972cbc4cf6fe)) + +### Bugs fixed + +- Raise errors on individual problematic extensions when listing extension [#1139](https://github.com/jupyter-server/jupyter_server/pull/1139) ([@Zsailer](https://github.com/Zsailer)) +- Find an available port before starting event loop [#1136](https://github.com/jupyter-server/jupyter_server/pull/1136) ([@blink1073](https://github.com/blink1073)) +- only write browser files if we're launching the browser [#1133](https://github.com/jupyter-server/jupyter_server/pull/1133) ([@hhuuggoo](https://github.com/hhuuggoo)) +- Logging message used to list sessions fails with template error [#1132](https://github.com/jupyter-server/jupyter_server/pull/1132) ([@vindex10](https://github.com/vindex10)) +- Include base_url at start of kernelspec resources path [#1124](https://github.com/jupyter-server/jupyter_server/pull/1124) ([@bloomsa](https://github.com/bloomsa)) + +### Maintenance and upkeep improvements + +- Fix lint rule [#1128](https://github.com/jupyter-server/jupyter_server/pull/1128) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this 
release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-12-08&to=2022-12-20&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-12-08..2022-12-20&type=Issues) | [@bloomsa](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Abloomsa+updated%3A2022-12-08..2022-12-20&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2022-12-08..2022-12-20&type=Issues) | [@hhuuggoo](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ahhuuggoo+updated%3A2022-12-08..2022-12-20&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2022-12-08..2022-12-20&type=Issues) | [@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2022-12-08..2022-12-20&type=Issues) | [@vindex10](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avindex10+updated%3A2022-12-08..2022-12-20&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-12-08..2022-12-20&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-12-08..2022-12-20&type=Issues) + +## 2.0.1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0...a400c0e0de56b1abe821ce26875fad9e7e711596)) + +### Enhancements made + +- \[Gateway\] Remove redundant list kernels request during session poll [#1112](https://github.com/jupyter-server/jupyter_server/pull/1112) ([@kevin-bates](https://github.com/kevin-bates)) + +### Maintenance 
and upkeep improvements + +- Fix jupyter_core pinning [#1122](https://github.com/jupyter-server/jupyter_server/pull/1122) ([@ophie200](https://github.com/ophie200)) +- Update docutils requirement from \<0.19 to \<0.20 [#1120](https://github.com/jupyter-server/jupyter_server/pull/1120) ([@dependabot](https://github.com/dependabot)) +- Adopt ruff and use less pre-commit [#1114](https://github.com/jupyter-server/jupyter_server/pull/1114) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-12-06&to=2022-12-08&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-12-06..2022-12-08&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2022-12-06..2022-12-08&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adependabot+updated%3A2022-12-06..2022-12-08&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2022-12-06..2022-12-08&type=Issues) | [@ofek](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aofek+updated%3A2022-12-06..2022-12-08&type=Issues) | [@ophie200](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aophie200+updated%3A2022-12-06..2022-12-08&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-12-06..2022-12-08&type=Issues) + +## 2.0.0 + +([Full 
Changelog](https://github.com/jupyter-server/jupyter_server/compare/6d0803b...312327fc498e3b96f7334c36b2623389d4f79b33)) + +### Enhancements made + +- Introduce ServerKernelManager class [#1101](https://github.com/jupyter-server/jupyter_server/pull/1101) ([@kevin-bates](https://github.com/kevin-bates)) +- New configurable/overridable kernel ZMQ+Websocket connection API [#1047](https://github.com/jupyter-server/jupyter_server/pull/1047) ([@Zsailer](https://github.com/Zsailer)) +- Pass kernel environment to `cwd_for_path` method [#1046](https://github.com/jupyter-server/jupyter_server/pull/1046) ([@divyansshhh](https://github.com/divyansshhh)) +- Better Handling of Asyncio [#1035](https://github.com/jupyter-server/jupyter_server/pull/1035) ([@blink1073](https://github.com/blink1073)) +- Add authorization to AuthenticatedFileHandler [#1021](https://github.com/jupyter-server/jupyter_server/pull/1021) ([@jiajunjie](https://github.com/jiajunjie)) +- \[Gateway\] Add support for gateway token renewal [#985](https://github.com/jupyter-server/jupyter_server/pull/985) ([@kevin-bates](https://github.com/kevin-bates)) +- Make it easier to pass custom env variables to kernel [#981](https://github.com/jupyter-server/jupyter_server/pull/981) ([@divyansshhh](https://github.com/divyansshhh)) +- Accept and manage cookies when requesting gateways [#969](https://github.com/jupyter-server/jupyter_server/pull/969) ([@wjsi](https://github.com/wjsi)) +- Emit events from the Contents Service [#954](https://github.com/jupyter-server/jupyter_server/pull/954) ([@Zsailer](https://github.com/Zsailer)) +- Retry certain errors between server and gateway [#944](https://github.com/jupyter-server/jupyter_server/pull/944) 
([@kevin-bates](https://github.com/kevin-bates)) +- Allow new file types [#895](https://github.com/jupyter-server/jupyter_server/pull/895) ([@davidbrochart](https://github.com/davidbrochart)) +- Make it easier for extensions to customize the ServerApp [#879](https://github.com/jupyter-server/jupyter_server/pull/879) ([@minrk](https://github.com/minrk)) +- Adds anonymous users [#863](https://github.com/jupyter-server/jupyter_server/pull/863) ([@hbcarlos](https://github.com/hbcarlos)) +- switch to jupyter_events [#862](https://github.com/jupyter-server/jupyter_server/pull/862) ([@Zsailer](https://github.com/Zsailer)) +- consolidate auth config on IdentityProvider [#825](https://github.com/jupyter-server/jupyter_server/pull/825) ([@minrk](https://github.com/minrk)) + +### Bugs fixed + +- Fix kernel WebSocket protocol [#1110](https://github.com/jupyter-server/jupyter_server/pull/1110) ([@davidbrochart](https://github.com/davidbrochart)) +- Defer webbrowser import [#1095](https://github.com/jupyter-server/jupyter_server/pull/1095) ([@blink1073](https://github.com/blink1073)) +- Use handle_outgoing_message for ZMQ replies [#1089](https://github.com/jupyter-server/jupyter_server/pull/1089) ([@Zsailer](https://github.com/Zsailer)) +- Call `ports_changed` on the multi-kernel-manager instead of the kernel manager [#1088](https://github.com/jupyter-server/jupyter_server/pull/1088) ([@Zsailer](https://github.com/Zsailer)) +- Add more websocket connection tests and fix bugs [#1085](https://github.com/jupyter-server/jupyter_server/pull/1085) ([@blink1073](https://github.com/blink1073)) +- Tornado WebSocketHandler fixup 
[#1083](https://github.com/jupyter-server/jupyter_server/pull/1083) ([@davidbrochart](https://github.com/davidbrochart)) +- persist userid cookie when auth is disabled [#1076](https://github.com/jupyter-server/jupyter_server/pull/1076) ([@minrk](https://github.com/minrk)) +- Fix rename_file and delete_file to handle hidden files properly [#1073](https://github.com/jupyter-server/jupyter_server/pull/1073) ([@yacchin1205](https://github.com/yacchin1205)) +- Add more coverage [#1069](https://github.com/jupyter-server/jupyter_server/pull/1069) ([@blink1073](https://github.com/blink1073)) +- Increase nbconvert and checkpoints coverage [#1066](https://github.com/jupyter-server/jupyter_server/pull/1066) ([@blink1073](https://github.com/blink1073)) +- Fix min version check again [#1049](https://github.com/jupyter-server/jupyter_server/pull/1049) ([@blink1073](https://github.com/blink1073)) +- Fallback new file type to file for contents put [#1013](https://github.com/jupyter-server/jupyter_server/pull/1013) ([@a3626a](https://github.com/a3626a)) +- Fix some typos in release instructions [#1003](https://github.com/jupyter-server/jupyter_server/pull/1003) ([@kevin-bates](https://github.com/kevin-bates)) +- Wrap the concurrent futures in an asyncio future [#1001](https://github.com/jupyter-server/jupyter_server/pull/1001) ([@blink1073](https://github.com/blink1073)) +- \[Gateway\] Fix and deprecate env whitelist handling [#979](https://github.com/jupyter-server/jupyter_server/pull/979) ([@kevin-bates](https://github.com/kevin-bates)) +- fix issues with jupyter_events 0.5.0 [#972](https://github.com/jupyter-server/jupyter_server/pull/972) 
([@Zsailer](https://github.com/Zsailer)) +- Correct content-type headers [#965](https://github.com/jupyter-server/jupyter_server/pull/965) ([@epignot](https://github.com/epignot)) +- Don't validate certs for when stopping server [#959](https://github.com/jupyter-server/jupyter_server/pull/959) ([@Zsailer](https://github.com/Zsailer)) +- Parse list value for `terminado_settings` [#949](https://github.com/jupyter-server/jupyter_server/pull/949) ([@krassowski](https://github.com/krassowski)) +- Fix bug in `api/contents` requests for an allowed copy [#939](https://github.com/jupyter-server/jupyter_server/pull/939) ([@kiersten-stokes](https://github.com/kiersten-stokes)) +- Fix error that prevents posting to `api/contents` endpoint with no body [#937](https://github.com/jupyter-server/jupyter_server/pull/937) ([@kiersten-stokes](https://github.com/kiersten-stokes)) +- avoid creating asyncio.Lock at import time [#935](https://github.com/jupyter-server/jupyter_server/pull/935) ([@minrk](https://github.com/minrk)) +- Fix `get_kernel_path` for `AsyncFileManager`s. 
[#929](https://github.com/jupyter-server/jupyter_server/pull/929) ([@thetorpedodog](https://github.com/thetorpedodog)) +- Fix c.GatewayClient.url snippet syntax [#917](https://github.com/jupyter-server/jupyter_server/pull/917) ([@rickwierenga](https://github.com/rickwierenga)) +- Add back support for kernel launch timeout pad [#910](https://github.com/jupyter-server/jupyter_server/pull/910) ([@CiprianAnton](https://github.com/CiprianAnton)) +- Notify ChannelQueue that the response router thread is finishing [#896](https://github.com/jupyter-server/jupyter_server/pull/896) ([@CiprianAnton](https://github.com/CiprianAnton)) +- Make ChannelQueue.get_msg true async [#892](https://github.com/jupyter-server/jupyter_server/pull/892) ([@CiprianAnton](https://github.com/CiprianAnton)) +- Check for serverapp for reraise flag [#887](https://github.com/jupyter-server/jupyter_server/pull/887) ([@vidartf](https://github.com/vidartf)) + +### Maintenance and upkeep improvements + +- Make tests less sensitive to default kernel name [#1118](https://github.com/jupyter-server/jupyter_server/pull/1118) ([@blink1073](https://github.com/blink1073)) +- Tweak codecov settings [#1113](https://github.com/jupyter-server/jupyter_server/pull/1113) ([@blink1073](https://github.com/blink1073)) +- Bump minimatch from 3.0.4 to 3.1.2 [#1109](https://github.com/jupyter-server/jupyter_server/pull/1109) ([@dependabot](https://github.com/dependabot)) +- Add skip-if-exists config [#1108](https://github.com/jupyter-server/jupyter_server/pull/1108) ([@blink1073](https://github.com/blink1073)) +- Use pytest-jupyter [#1099](https://github.com/jupyter-server/jupyter_server/pull/1099) 
([@blink1073](https://github.com/blink1073)) +- Clean up release instructions and coverage handling [#1098](https://github.com/jupyter-server/jupyter_server/pull/1098) ([@blink1073](https://github.com/blink1073)) +- Import ensure_async from jupyter_core [#1093](https://github.com/jupyter-server/jupyter_server/pull/1093) ([@davidbrochart](https://github.com/davidbrochart)) +- Add more tests [#1092](https://github.com/jupyter-server/jupyter_server/pull/1092) ([@blink1073](https://github.com/blink1073)) +- Fix coverage upload [#1091](https://github.com/jupyter-server/jupyter_server/pull/1091) ([@blink1073](https://github.com/blink1073)) +- Add base handler tests [#1090](https://github.com/jupyter-server/jupyter_server/pull/1090) ([@blink1073](https://github.com/blink1073)) +- Add more websocket connection tests and fix bugs [#1085](https://github.com/jupyter-server/jupyter_server/pull/1085) ([@blink1073](https://github.com/blink1073)) +- Use base setup dependency type [#1084](https://github.com/jupyter-server/jupyter_server/pull/1084) ([@blink1073](https://github.com/blink1073)) +- Add more serverapp tests [#1079](https://github.com/jupyter-server/jupyter_server/pull/1079) ([@blink1073](https://github.com/blink1073)) +- Add more gateway tests [#1078](https://github.com/jupyter-server/jupyter_server/pull/1078) ([@blink1073](https://github.com/blink1073)) +- More cleanup [#1077](https://github.com/jupyter-server/jupyter_server/pull/1077) ([@blink1073](https://github.com/blink1073)) +- Fix hatch scripts and windows workflow run [#1074](https://github.com/jupyter-server/jupyter_server/pull/1074) ([@blink1073](https://github.com/blink1073)) +- use 
recommended github-workflows checker [#1071](https://github.com/jupyter-server/jupyter_server/pull/1071) ([@blink1073](https://github.com/blink1073)) +- Add more coverage [#1069](https://github.com/jupyter-server/jupyter_server/pull/1069) ([@blink1073](https://github.com/blink1073)) +- More coverage [#1067](https://github.com/jupyter-server/jupyter_server/pull/1067) ([@blink1073](https://github.com/blink1073)) +- Increase nbconvert and checkpoints coverage [#1066](https://github.com/jupyter-server/jupyter_server/pull/1066) ([@blink1073](https://github.com/blink1073)) +- Test downstream jupyter_server_terminals [#1065](https://github.com/jupyter-server/jupyter_server/pull/1065) ([@blink1073](https://github.com/blink1073)) +- Test notebook prerelease [#1064](https://github.com/jupyter-server/jupyter_server/pull/1064) ([@blink1073](https://github.com/blink1073)) +- MAINT: remove python 3.4 branch [#1061](https://github.com/jupyter-server/jupyter_server/pull/1061) ([@Carreau](https://github.com/Carreau)) +- Bump actions/checkout from 2 to 3 [#1056](https://github.com/jupyter-server/jupyter_server/pull/1056) ([@dependabot](https://github.com/dependabot)) +- Bump actions/setup-python from 2 to 4 [#1055](https://github.com/jupyter-server/jupyter_server/pull/1055) ([@dependabot](https://github.com/dependabot)) +- Bump pre-commit/action from 2.0.0 to 3.0.0 [#1054](https://github.com/jupyter-server/jupyter_server/pull/1054) ([@dependabot](https://github.com/dependabot)) +- Add dependabot file [#1053](https://github.com/jupyter-server/jupyter_server/pull/1053) ([@blink1073](https://github.com/blink1073)) +- Use global env for min version check 
[#1048](https://github.com/jupyter-server/jupyter_server/pull/1048) ([@blink1073](https://github.com/blink1073)) +- Clean up handling of synchronous managers [#1044](https://github.com/jupyter-server/jupyter_server/pull/1044) ([@blink1073](https://github.com/blink1073)) +- Clean up config files [#1031](https://github.com/jupyter-server/jupyter_server/pull/1031) ([@blink1073](https://github.com/blink1073)) +- Make node optional [#1030](https://github.com/jupyter-server/jupyter_server/pull/1030) ([@blink1073](https://github.com/blink1073)) +- Use admin github token for releaser [#1025](https://github.com/jupyter-server/jupyter_server/pull/1025) ([@blink1073](https://github.com/blink1073)) +- CI Cleanup [#1023](https://github.com/jupyter-server/jupyter_server/pull/1023) ([@blink1073](https://github.com/blink1073)) +- Use mdformat instead of prettier [#1022](https://github.com/jupyter-server/jupyter_server/pull/1022) ([@blink1073](https://github.com/blink1073)) +- Add pyproject validation [#1020](https://github.com/jupyter-server/jupyter_server/pull/1020) ([@blink1073](https://github.com/blink1073)) +- Remove hardcoded client install in CI [#1019](https://github.com/jupyter-server/jupyter_server/pull/1019) ([@blink1073](https://github.com/blink1073)) +- Handle client 8 pending kernels [#1014](https://github.com/jupyter-server/jupyter_server/pull/1014) ([@blink1073](https://github.com/blink1073)) +- Use releaser v2 tag [#1010](https://github.com/jupyter-server/jupyter_server/pull/1010) ([@blink1073](https://github.com/blink1073)) +- Use hatch environments to simplify test, coverage, and docs build 
[#1007](https://github.com/jupyter-server/jupyter_server/pull/1007) ([@blink1073](https://github.com/blink1073)) +- Update to version2 releaser [#1006](https://github.com/jupyter-server/jupyter_server/pull/1006) ([@blink1073](https://github.com/blink1073)) +- Do not use dev version yet [#999](https://github.com/jupyter-server/jupyter_server/pull/999) ([@blink1073](https://github.com/blink1073)) +- Add workflows for simplified publish [#993](https://github.com/jupyter-server/jupyter_server/pull/993) ([@blink1073](https://github.com/blink1073)) +- Remove hardcoded client install [#991](https://github.com/jupyter-server/jupyter_server/pull/991) ([@blink1073](https://github.com/blink1073)) +- Test with client 8 updates [#988](https://github.com/jupyter-server/jupyter_server/pull/988) ([@blink1073](https://github.com/blink1073)) +- Switch to using hatchling version command [#984](https://github.com/jupyter-server/jupyter_server/pull/984) ([@blink1073](https://github.com/blink1073)) +- Run downstream tests in parallel [#973](https://github.com/jupyter-server/jupyter_server/pull/973) ([@blink1073](https://github.com/blink1073)) +- Update pytest_plugin with fixtures to test auth in core and extensions [#956](https://github.com/jupyter-server/jupyter_server/pull/956) ([@akshaychitneni](https://github.com/akshaychitneni)) +- Fix docs build [#952](https://github.com/jupyter-server/jupyter_server/pull/952) ([@blink1073](https://github.com/blink1073)) +- Fix flake8 v5 compat [#941](https://github.com/jupyter-server/jupyter_server/pull/941) ([@blink1073](https://github.com/blink1073)) +- Improve logging of bare exceptions and other cleanups. 
[#922](https://github.com/jupyter-server/jupyter_server/pull/922) ([@thetorpedodog](https://github.com/thetorpedodog)) +- Use more explicit version template for pyproject [#919](https://github.com/jupyter-server/jupyter_server/pull/919) ([@blink1073](https://github.com/blink1073)) +- Fix handling of dev version [#913](https://github.com/jupyter-server/jupyter_server/pull/913) ([@blink1073](https://github.com/blink1073)) +- Fix owasp link [#908](https://github.com/jupyter-server/jupyter_server/pull/908) ([@blink1073](https://github.com/blink1073)) +- default to system node version in precommit [#906](https://github.com/jupyter-server/jupyter_server/pull/906) ([@dlqqq](https://github.com/dlqqq)) +- Test python 3.11 on ubuntu [#839](https://github.com/jupyter-server/jupyter_server/pull/839) ([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- Remove left over from notebook [#1117](https://github.com/jupyter-server/jupyter_server/pull/1117) ([@fcollonval](https://github.com/fcollonval)) +- Fix wording [#1037](https://github.com/jupyter-server/jupyter_server/pull/1037) ([@fcollonval](https://github.com/fcollonval)) +- Fix GitHub actions badge link [#1011](https://github.com/jupyter-server/jupyter_server/pull/1011) ([@blink1073](https://github.com/blink1073)) +- Pin docutils to fix docs build [#1004](https://github.com/jupyter-server/jupyter_server/pull/1004) ([@blink1073](https://github.com/blink1073)) +- Update server extension disable instructions [#998](https://github.com/jupyter-server/jupyter_server/pull/998) ([@3coins](https://github.com/3coins)) +- Update index.rst 
[#970](https://github.com/jupyter-server/jupyter_server/pull/970) ([@razrotenberg](https://github.com/razrotenberg)) +- Fix typo in IdentityProvider documentation [#915](https://github.com/jupyter-server/jupyter_server/pull/915) ([@danielyahn](https://github.com/danielyahn)) +- docs: document the logging_config trait [#844](https://github.com/jupyter-server/jupyter_server/pull/844) ([@oliver-sanders](https://github.com/oliver-sanders)) + +### Deprecated features + +- \[Gateway\] Fix and deprecate env whitelist handling [#979](https://github.com/jupyter-server/jupyter_server/pull/979) ([@kevin-bates](https://github.com/kevin-bates)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-06-23&to=2022-12-06&type=c)) + +[@3coins](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3A3coins+updated%3A2022-06-23..2022-12-06&type=Issues) | [@a3626a](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aa3626a+updated%3A2022-06-23..2022-12-06&type=Issues) | [@akshaychitneni](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aakshaychitneni+updated%3A2022-06-23..2022-12-06&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-06-23..2022-12-06&type=Issues) | [@bloomsa](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Abloomsa+updated%3A2022-06-23..2022-12-06&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2022-06-23..2022-12-06&type=Issues) | 
[@CiprianAnton](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACiprianAnton+updated%3A2022-06-23..2022-12-06&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2022-06-23..2022-12-06&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-06-23..2022-12-06&type=Issues) | [@danielyahn](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adanielyahn+updated%3A2022-06-23..2022-12-06&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2022-06-23..2022-12-06&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adependabot+updated%3A2022-06-23..2022-12-06&type=Issues) | [@divyansshhh](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adivyansshhh+updated%3A2022-06-23..2022-12-06&type=Issues) | [@dlqqq](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adlqqq+updated%3A2022-06-23..2022-12-06&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2022-06-23..2022-12-06&type=Issues) | [@ellisonbg](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aellisonbg+updated%3A2022-06-23..2022-12-06&type=Issues) | [@epignot](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aepignot+updated%3A2022-06-23..2022-12-06&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Afcollonval+updated%3A2022-06-23..2022-12-06&type=Issues) | 
[@hbcarlos](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ahbcarlos+updated%3A2022-06-23..2022-12-06&type=Issues) | [@jiajunjie](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajiajunjie+updated%3A2022-06-23..2022-12-06&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2022-06-23..2022-12-06&type=Issues) | [@kiersten-stokes](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akiersten-stokes+updated%3A2022-06-23..2022-12-06&type=Issues) | [@krassowski](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akrassowski+updated%3A2022-06-23..2022-12-06&type=Issues) | [@meeseeksdev](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ameeseeksdev+updated%3A2022-06-23..2022-12-06&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2022-06-23..2022-12-06&type=Issues) | [@ofek](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aofek+updated%3A2022-06-23..2022-12-06&type=Issues) | [@oliver-sanders](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aoliver-sanders+updated%3A2022-06-23..2022-12-06&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-06-23..2022-12-06&type=Issues) | [@razrotenberg](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Arazrotenberg+updated%3A2022-06-23..2022-12-06&type=Issues) | [@rickwierenga](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Arickwierenga+updated%3A2022-06-23..2022-12-06&type=Issues) | 
[@thetorpedodog](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Athetorpedodog+updated%3A2022-06-23..2022-12-06&type=Issues) | [@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2022-06-23..2022-12-06&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-06-23..2022-12-06&type=Issues) | [@wjsi](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awjsi+updated%3A2022-06-23..2022-12-06&type=Issues) | [@yacchin1205](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ayacchin1205+updated%3A2022-06-23..2022-12-06&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-06-23..2022-12-06&type=Issues) + +## 2.0.0rc8 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0rc7...d2c974a4580e9269580a632a3c8258e99792e279)) + +### Enhancements made + +- Introduce ServerKernelManager class [#1101](https://github.com/jupyter-server/jupyter_server/pull/1101) ([@kevin-bates](https://github.com/kevin-bates)) + +### Bugs fixed + +- Defer webbrowser import [#1095](https://github.com/jupyter-server/jupyter_server/pull/1095) ([@blink1073](https://github.com/blink1073)) + +### Maintenance and upkeep improvements + +- Use pytest-jupyter [#1099](https://github.com/jupyter-server/jupyter_server/pull/1099) ([@blink1073](https://github.com/blink1073)) +- Clean up release instructions and coverage handling [#1098](https://github.com/jupyter-server/jupyter_server/pull/1098) ([@blink1073](https://github.com/blink1073)) +- Add more tests 
[#1092](https://github.com/jupyter-server/jupyter_server/pull/1092) ([@blink1073](https://github.com/blink1073)) +- Fix coverage upload [#1091](https://github.com/jupyter-server/jupyter_server/pull/1091) ([@blink1073](https://github.com/blink1073)) +- Add base handler tests [#1090](https://github.com/jupyter-server/jupyter_server/pull/1090) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-11-23&to=2022-11-29&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-11-23..2022-11-29&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2022-11-23..2022-11-29&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2022-11-23..2022-11-29&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-11-23..2022-11-29&type=Issues) + +## 2.0.0rc7 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0rc6...339038b532ec928b59861f9426a8ba1214454741)) + +### Bugs fixed + +- Use handle_outgoing_message for ZMQ replies [#1089](https://github.com/jupyter-server/jupyter_server/pull/1089) ([@Zsailer](https://github.com/Zsailer)) +- Call `ports_changed` on the multi-kernel-manager instead of the kernel manager [#1088](https://github.com/jupyter-server/jupyter_server/pull/1088) ([@Zsailer](https://github.com/Zsailer)) +- Add more websocket connection tests and fix bugs 
[#1085](https://github.com/jupyter-server/jupyter_server/pull/1085) ([@blink1073](https://github.com/blink1073)) + +### Maintenance and upkeep improvements + +- Add more websocket connection tests and fix bugs [#1085](https://github.com/jupyter-server/jupyter_server/pull/1085) ([@blink1073](https://github.com/blink1073)) +- Use base setup dependency type [#1084](https://github.com/jupyter-server/jupyter_server/pull/1084) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-11-21&to=2022-11-23&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-11-21..2022-11-23&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2022-11-21..2022-11-23&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-11-21..2022-11-23&type=Issues) + +## 2.0.0rc6 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0rc5...cd060da67aa6e3e5d8ff791f0a559a91282be2b3)) + +### Bugs fixed + +- Tornado WebSocketHandler fixup [#1083](https://github.com/jupyter-server/jupyter_server/pull/1083) ([@davidbrochart](https://github.com/davidbrochart)) + +### Maintenance and upkeep improvements + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-11-21&to=2022-11-21&type=c)) + 
+[@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2022-11-21..2022-11-21&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2022-11-21..2022-11-21&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-11-21..2022-11-21&type=Issues) + +## 2.0.0rc5 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0rc4...12f7c1d47e0ca76f8c39dfd1499142e8b6df09ee)) + +### Enhancements made + +- New configurable/overridable kernel ZMQ+Websocket connection API [#1047](https://github.com/jupyter-server/jupyter_server/pull/1047) ([@Zsailer](https://github.com/Zsailer)) +- Add authorization to AuthenticatedFileHandler [#1021](https://github.com/jupyter-server/jupyter_server/pull/1021) ([@jiajunjie](https://github.com/jiajunjie)) + +### Bugs fixed + +- persist userid cookie when auth is disabled [#1076](https://github.com/jupyter-server/jupyter_server/pull/1076) ([@minrk](https://github.com/minrk)) +- Fix rename_file and delete_file to handle hidden files properly [#1073](https://github.com/jupyter-server/jupyter_server/pull/1073) ([@yacchin1205](https://github.com/yacchin1205)) +- Add more coverage [#1069](https://github.com/jupyter-server/jupyter_server/pull/1069) ([@blink1073](https://github.com/blink1073)) +- Increase nbconvert and checkpoints coverage [#1066](https://github.com/jupyter-server/jupyter_server/pull/1066) ([@blink1073](https://github.com/blink1073)) + +### Maintenance and upkeep improvements + +- Add more serverapp tests 
[#1079](https://github.com/jupyter-server/jupyter_server/pull/1079) ([@blink1073](https://github.com/blink1073)) +- Add more gateway tests [#1078](https://github.com/jupyter-server/jupyter_server/pull/1078) ([@blink1073](https://github.com/blink1073)) +- More cleanup [#1077](https://github.com/jupyter-server/jupyter_server/pull/1077) ([@blink1073](https://github.com/blink1073)) +- Fix hatch scripts and windows workflow run [#1074](https://github.com/jupyter-server/jupyter_server/pull/1074) ([@blink1073](https://github.com/blink1073)) +- use recommended github-workflows checker [#1071](https://github.com/jupyter-server/jupyter_server/pull/1071) ([@blink1073](https://github.com/blink1073)) +- Add more coverage [#1069](https://github.com/jupyter-server/jupyter_server/pull/1069) ([@blink1073](https://github.com/blink1073)) +- More coverage [#1067](https://github.com/jupyter-server/jupyter_server/pull/1067) ([@blink1073](https://github.com/blink1073)) +- Increase nbconvert and checkpoints coverage [#1066](https://github.com/jupyter-server/jupyter_server/pull/1066) ([@blink1073](https://github.com/blink1073)) +- Test downstream jupyter_server_terminals [#1065](https://github.com/jupyter-server/jupyter_server/pull/1065) ([@blink1073](https://github.com/blink1073)) +- Test notebook prerelease [#1064](https://github.com/jupyter-server/jupyter_server/pull/1064) ([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- docs: document the logging_config trait [#844](https://github.com/jupyter-server/jupyter_server/pull/844) ([@oliver-sanders](https://github.com/oliver-sanders)) + +### Contributors to this release + +([GitHub contributors page for 
this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-11-10&to=2022-11-21&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-11-10..2022-11-21&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2022-11-10..2022-11-21&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-11-10..2022-11-21&type=Issues) | [@jiajunjie](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajiajunjie+updated%3A2022-11-10..2022-11-21&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2022-11-10..2022-11-21&type=Issues) | [@oliver-sanders](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aoliver-sanders+updated%3A2022-11-10..2022-11-21&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-11-10..2022-11-21&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-11-10..2022-11-21&type=Issues) | [@yacchin1205](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ayacchin1205+updated%3A2022-11-10..2022-11-21&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-11-10..2022-11-21&type=Issues) + +## 2.0.0rc4 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0rc3...f6b732c652e0b5a600ff0d3f60c6a34173d8d6a5)) + +### Enhancements made + +- Pass kernel 
environment to `cwd_for_path` method [#1046](https://github.com/jupyter-server/jupyter_server/pull/1046) ([@divyansshhh](https://github.com/divyansshhh)) +- Better Handling of Asyncio [#1035](https://github.com/jupyter-server/jupyter_server/pull/1035) ([@blink1073](https://github.com/blink1073)) + +### Bugs fixed + +- Fix min version check again [#1049](https://github.com/jupyter-server/jupyter_server/pull/1049) ([@blink1073](https://github.com/blink1073)) + +### Maintenance and upkeep improvements + +- MAINT: remove python 3.4 branch [#1061](https://github.com/jupyter-server/jupyter_server/pull/1061) ([@Carreau](https://github.com/Carreau)) +- Bump actions/checkout from 2 to 3 [#1056](https://github.com/jupyter-server/jupyter_server/pull/1056) ([@dependabot](https://github.com/dependabot)) +- Bump actions/setup-python from 2 to 4 [#1055](https://github.com/jupyter-server/jupyter_server/pull/1055) ([@dependabot](https://github.com/dependabot)) +- Bump pre-commit/action from 2.0.0 to 3.0.0 [#1054](https://github.com/jupyter-server/jupyter_server/pull/1054) ([@dependabot](https://github.com/dependabot)) +- Add dependabot file [#1053](https://github.com/jupyter-server/jupyter_server/pull/1053) ([@blink1073](https://github.com/blink1073)) +- Use global env for min version check [#1048](https://github.com/jupyter-server/jupyter_server/pull/1048) ([@blink1073](https://github.com/blink1073)) +- Clean up handling of synchronous managers [#1044](https://github.com/jupyter-server/jupyter_server/pull/1044) ([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- Fix wording [#1037](https://github.com/jupyter-server/jupyter_server/pull/1037) 
([@fcollonval](https://github.com/fcollonval)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-10-17&to=2022-11-10&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-10-17..2022-11-10&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2022-10-17..2022-11-10&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-10-17..2022-11-10&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adependabot+updated%3A2022-10-17..2022-11-10&type=Issues) | [@divyansshhh](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adivyansshhh+updated%3A2022-10-17..2022-11-10&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Afcollonval+updated%3A2022-10-17..2022-11-10&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-10-17..2022-11-10&type=Issues) + +## 2.0.0rc3 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0rc2...fc0ac3236fdd92778ea765db6e8982212c8389ee)) + +### Maintenance and upkeep improvements + +- Clean up config files [#1031](https://github.com/jupyter-server/jupyter_server/pull/1031) ([@blink1073](https://github.com/blink1073)) +- Make node optional [#1030](https://github.com/jupyter-server/jupyter_server/pull/1030) ([@blink1073](https://github.com/blink1073)) + +### Contributors to 
this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-10-11&to=2022-10-17&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-10-11..2022-10-17&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-10-11..2022-10-17&type=Issues) + +## 2.0.0rc2 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0rc1...32de53beae1e9396dd3111b17222ec802b122f0b)) + +### Bugs fixed + +- Fallback new file type to file for contents put [#1013](https://github.com/jupyter-server/jupyter_server/pull/1013) ([@a3626a](https://github.com/a3626a)) +- Fix some typos in release instructions [#1003](https://github.com/jupyter-server/jupyter_server/pull/1003) ([@kevin-bates](https://github.com/kevin-bates)) + +### Maintenance and upkeep improvements + +- Use admin github token for releaser [#1025](https://github.com/jupyter-server/jupyter_server/pull/1025) ([@blink1073](https://github.com/blink1073)) +- CI Cleanup [#1023](https://github.com/jupyter-server/jupyter_server/pull/1023) ([@blink1073](https://github.com/blink1073)) +- Use mdformat instead of prettier [#1022](https://github.com/jupyter-server/jupyter_server/pull/1022) ([@blink1073](https://github.com/blink1073)) +- Add pyproject validation [#1020](https://github.com/jupyter-server/jupyter_server/pull/1020) ([@blink1073](https://github.com/blink1073)) +- Remove hardcoded client install in CI [#1019](https://github.com/jupyter-server/jupyter_server/pull/1019) ([@blink1073](https://github.com/blink1073)) +- Handle 
client 8 pending kernels [#1014](https://github.com/jupyter-server/jupyter_server/pull/1014) ([@blink1073](https://github.com/blink1073)) +- Use releaser v2 tag [#1010](https://github.com/jupyter-server/jupyter_server/pull/1010) ([@blink1073](https://github.com/blink1073)) +- Use hatch environments to simplify test, coverage, and docs build [#1007](https://github.com/jupyter-server/jupyter_server/pull/1007) ([@blink1073](https://github.com/blink1073)) +- Update to version2 releaser [#1006](https://github.com/jupyter-server/jupyter_server/pull/1006) ([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- Fix GitHub actions badge link [#1011](https://github.com/jupyter-server/jupyter_server/pull/1011) ([@blink1073](https://github.com/blink1073)) +- Pin docutils to fix docs build [#1004](https://github.com/jupyter-server/jupyter_server/pull/1004) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-09-27&to=2022-10-11&type=c)) + +[@a3626a](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aa3626a+updated%3A2022-09-27..2022-10-11&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-09-27..2022-10-11&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-09-27..2022-10-11&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2022-09-27..2022-10-11&type=Issues) | 
[@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-09-27..2022-10-11&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-09-27..2022-10-11&type=Issues) + +## 2.0.0rc1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0rc0...dd8a6937651170e2cea38a2fecbecc2a1a4f655f)) + +### Enhancements made + +- \[Gateway\] Add support for gateway token renewal [#985](https://github.com/jupyter-server/jupyter_server/pull/985) ([@kevin-bates](https://github.com/kevin-bates)) +- Make it easier to pass custom env variables to kernel [#981](https://github.com/jupyter-server/jupyter_server/pull/981) ([@divyansshhh](https://github.com/divyansshhh)) + +### Bugs fixed + +- Wrap the concurrent futures in an asyncio future [#1001](https://github.com/jupyter-server/jupyter_server/pull/1001) ([@blink1073](https://github.com/blink1073)) +- \[Gateway\] Fix and deprecate env whitelist handling [#979](https://github.com/jupyter-server/jupyter_server/pull/979) ([@kevin-bates](https://github.com/kevin-bates)) + +### Maintenance and upkeep improvements + +- Do not use dev version yet [#999](https://github.com/jupyter-server/jupyter_server/pull/999) ([@blink1073](https://github.com/blink1073)) +- Add workflows for simplified publish [#993](https://github.com/jupyter-server/jupyter_server/pull/993) ([@blink1073](https://github.com/blink1073)) +- Remove hardcoded client install [#991](https://github.com/jupyter-server/jupyter_server/pull/991) ([@blink1073](https://github.com/blink1073)) +- Test with client 8 updates 
[#988](https://github.com/jupyter-server/jupyter_server/pull/988) ([@blink1073](https://github.com/blink1073)) +- Switch to using hatchling version command [#984](https://github.com/jupyter-server/jupyter_server/pull/984) ([@blink1073](https://github.com/blink1073)) +- Test python 3.11 on ubuntu [#839](https://github.com/jupyter-server/jupyter_server/pull/839) ([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- Update server extension disable instructions [#998](https://github.com/jupyter-server/jupyter_server/pull/998) ([@3coins](https://github.com/3coins)) + +### Deprecated features + +- \[Gateway\] Fix and deprecate env whitelist handling [#979](https://github.com/jupyter-server/jupyter_server/pull/979) ([@kevin-bates](https://github.com/kevin-bates)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-09-13&to=2022-09-27&type=c)) + +[@3coins](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3A3coins+updated%3A2022-09-13..2022-09-27&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-09-13..2022-09-27&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-09-13..2022-09-27&type=Issues) | [@divyansshhh](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adivyansshhh+updated%3A2022-09-13..2022-09-27&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2022-09-13..2022-09-27&type=Issues) | 
[@meeseeksdev](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ameeseeksdev+updated%3A2022-09-13..2022-09-27&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-09-13..2022-09-27&type=Issues) + +## 2.0.0rc0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0b1...90905e116a2ae49b35b49c360614b0831498477b)) + +### New features added + +- Identity API at /api/me [#671](https://github.com/jupyter-server/jupyter_server/pull/671) ([@minrk](https://github.com/minrk)) + +### Enhancements made + +- Accept and manage cookies when requesting gateways [#969](https://github.com/jupyter-server/jupyter_server/pull/969) ([@wjsi](https://github.com/wjsi)) +- Emit events from the Contents Service [#954](https://github.com/jupyter-server/jupyter_server/pull/954) ([@Zsailer](https://github.com/Zsailer)) +- Retry certain errors between server and gateway [#944](https://github.com/jupyter-server/jupyter_server/pull/944) ([@kevin-bates](https://github.com/kevin-bates)) +- Allow new file types [#895](https://github.com/jupyter-server/jupyter_server/pull/895) ([@davidbrochart](https://github.com/davidbrochart)) +- Adds anonymous users [#863](https://github.com/jupyter-server/jupyter_server/pull/863) ([@hbcarlos](https://github.com/hbcarlos)) +- switch to jupyter_events [#862](https://github.com/jupyter-server/jupyter_server/pull/862) ([@Zsailer](https://github.com/Zsailer)) +- Make it easier for extensions to customize the ServerApp [#879](https://github.com/jupyter-server/jupyter_server/pull/879) ([@minrk](https://github.com/minrk)) +- consolidate auth config on IdentityProvider 
[#825](https://github.com/jupyter-server/jupyter_server/pull/825) ([@minrk](https://github.com/minrk)) +- Show import error when failing to load an extension [#878](https://github.com/jupyter-server/jupyter_server/pull/878) ([@minrk](https://github.com/minrk)) +- Add the root_dir value to the logging message in case of non compliant preferred_dir [#804](https://github.com/jupyter-server/jupyter_server/pull/804) ([@echarles](https://github.com/echarles)) +- Hydrate a Kernel Manager when calling GatewayKernelManager.start_kernel with a kernel_id [#788](https://github.com/jupyter-server/jupyter_server/pull/788) ([@Zsailer](https://github.com/Zsailer)) +- Remove terminals in favor of jupyter_server_terminals extension [#651](https://github.com/jupyter-server/jupyter_server/pull/651) ([@Zsailer](https://github.com/Zsailer)) + +### Bugs fixed + +- fix issues with jupyter_events 0.5.0 [#972](https://github.com/jupyter-server/jupyter_server/pull/972) ([@Zsailer](https://github.com/Zsailer)) +- Correct content-type headers [#965](https://github.com/jupyter-server/jupyter_server/pull/965) ([@epignot](https://github.com/epignot)) +- Don't validate certs for when stopping server [#959](https://github.com/jupyter-server/jupyter_server/pull/959) ([@Zsailer](https://github.com/Zsailer)) +- Parse list value for `terminado_settings` [#949](https://github.com/jupyter-server/jupyter_server/pull/949) ([@krassowski](https://github.com/krassowski)) +- Fix bug in `api/contents` requests for an allowed copy [#939](https://github.com/jupyter-server/jupyter_server/pull/939) ([@kiersten-stokes](https://github.com/kiersten-stokes)) +- Fix error that prevents posting to `api/contents` endpoint with no body 
[#937](https://github.com/jupyter-server/jupyter_server/pull/937) ([@kiersten-stokes](https://github.com/kiersten-stokes)) +- avoid creating asyncio.Lock at import time [#935](https://github.com/jupyter-server/jupyter_server/pull/935) ([@minrk](https://github.com/minrk)) +- Fix `get_kernel_path` for `AsyncFileManager`s. [#929](https://github.com/jupyter-server/jupyter_server/pull/929) ([@thetorpedodog](https://github.com/thetorpedodog)) +- Check for serverapp for reraise flag [#887](https://github.com/jupyter-server/jupyter_server/pull/887) ([@vidartf](https://github.com/vidartf)) +- Notify ChannelQueue that the response router thread is finishing [#896](https://github.com/jupyter-server/jupyter_server/pull/896) ([@CiprianAnton](https://github.com/CiprianAnton)) +- Make ChannelQueue.get_msg true async [#892](https://github.com/jupyter-server/jupyter_server/pull/892) ([@CiprianAnton](https://github.com/CiprianAnton)) +- Fix gateway kernel shutdown [#874](https://github.com/jupyter-server/jupyter_server/pull/874) ([@kevin-bates](https://github.com/kevin-bates)) +- Defer preferred_dir validation until root_dir is set [#826](https://github.com/jupyter-server/jupyter_server/pull/826) ([@kevin-bates](https://github.com/kevin-bates)) +- missing required arguments in utils.fetch [#798](https://github.com/jupyter-server/jupyter_server/pull/798) ([@minrk](https://github.com/minrk)) + +### Maintenance and upkeep improvements + +- Run downstream tests in parallel [#973](https://github.com/jupyter-server/jupyter_server/pull/973) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#971](https://github.com/jupyter-server/jupyter_server/pull/971) 
([@pre-commit-ci](https://github.com/pre-commit-ci)) +- \[pre-commit.ci\] pre-commit autoupdate [#963](https://github.com/jupyter-server/jupyter_server/pull/963) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Update pytest_plugin with fixtures to test auth in core and extensions [#956](https://github.com/jupyter-server/jupyter_server/pull/956) ([@akshaychitneni](https://github.com/akshaychitneni)) +- \[pre-commit.ci\] pre-commit autoupdate [#955](https://github.com/jupyter-server/jupyter_server/pull/955) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Fix docs build [#952](https://github.com/jupyter-server/jupyter_server/pull/952) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#945](https://github.com/jupyter-server/jupyter_server/pull/945) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- \[pre-commit.ci\] pre-commit autoupdate [#942](https://github.com/jupyter-server/jupyter_server/pull/942) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Fix flake8 v5 compat [#941](https://github.com/jupyter-server/jupyter_server/pull/941) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#938](https://github.com/jupyter-server/jupyter_server/pull/938) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- \[pre-commit.ci\] pre-commit autoupdate [#928](https://github.com/jupyter-server/jupyter_server/pull/928) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- \[pre-commit.ci\] pre-commit autoupdate [#902](https://github.com/jupyter-server/jupyter_server/pull/902) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- \[pre-commit.ci\] pre-commit autoupdate 
[#894](https://github.com/jupyter-server/jupyter_server/pull/894) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Normalize os_path [#886](https://github.com/jupyter-server/jupyter_server/pull/886) ([@martinRenou](https://github.com/martinRenou)) +- \[pre-commit.ci\] pre-commit autoupdate [#885](https://github.com/jupyter-server/jupyter_server/pull/885) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- suppress tornado deprecation warnings [#882](https://github.com/jupyter-server/jupyter_server/pull/882) ([@minrk](https://github.com/minrk)) +- Fix lint [#867](https://github.com/jupyter-server/jupyter_server/pull/867) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#866](https://github.com/jupyter-server/jupyter_server/pull/866) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Fix sphinx 5.0 support [#865](https://github.com/jupyter-server/jupyter_server/pull/865) ([@blink1073](https://github.com/blink1073)) +- Add license metadata and file [#827](https://github.com/jupyter-server/jupyter_server/pull/827) ([@blink1073](https://github.com/blink1073)) +- CI cleanup [#824](https://github.com/jupyter-server/jupyter_server/pull/824) ([@blink1073](https://github.com/blink1073)) +- Switch to flit [#823](https://github.com/jupyter-server/jupyter_server/pull/823) ([@blink1073](https://github.com/blink1073)) +- Remove unused pytest-mock dependency [#814](https://github.com/jupyter-server/jupyter_server/pull/814) ([@mgorny](https://github.com/mgorny)) +- Remove duplicate requests requirement from setup.cfg [#813](https://github.com/jupyter-server/jupyter_server/pull/813) 
([@mgorny](https://github.com/mgorny)) +- \[pre-commit.ci\] pre-commit autoupdate [#802](https://github.com/jupyter-server/jupyter_server/pull/802) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Add helper jobs for branch protection [#797](https://github.com/jupyter-server/jupyter_server/pull/797) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#793](https://github.com/jupyter-server/jupyter_server/pull/793) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Centralize app cleanup [#792](https://github.com/jupyter-server/jupyter_server/pull/792) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#785](https://github.com/jupyter-server/jupyter_server/pull/785) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Clean up pre-commit [#782](https://github.com/jupyter-server/jupyter_server/pull/782) ([@blink1073](https://github.com/blink1073)) +- Add mypy check [#779](https://github.com/jupyter-server/jupyter_server/pull/779) ([@blink1073](https://github.com/blink1073)) +- Use new post-version-spec from jupyter_releaser [#777](https://github.com/jupyter-server/jupyter_server/pull/777) ([@blink1073](https://github.com/blink1073)) +- Give write permissions to enforce label workflow [#776](https://github.com/jupyter-server/jupyter_server/pull/776) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#775](https://github.com/jupyter-server/jupyter_server/pull/775) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Add explicit handling of warnings [#771](https://github.com/jupyter-server/jupyter_server/pull/771) 
([@blink1073](https://github.com/blink1073)) +- Use test-sdist from maintainer-tools [#769](https://github.com/jupyter-server/jupyter_server/pull/769) ([@blink1073](https://github.com/blink1073)) +- Add pyupgrade and doc8 hooks [#768](https://github.com/jupyter-server/jupyter_server/pull/768) ([@blink1073](https://github.com/blink1073)) +- update some metadata fields, sort deps [#675](https://github.com/jupyter-server/jupyter_server/pull/675) ([@bollwyvl](https://github.com/bollwyvl)) + +### Documentation improvements + +- Fix typo in IdentityProvider documentation [#915](https://github.com/jupyter-server/jupyter_server/pull/915) ([@danielyahn](https://github.com/danielyahn)) +- Add Session workflows documentation [#808](https://github.com/jupyter-server/jupyter_server/pull/808) ([@andreyvelich](https://github.com/andreyvelich)) +- Add Jupyter Server Architecture diagram [#801](https://github.com/jupyter-server/jupyter_server/pull/801) ([@andreyvelich](https://github.com/andreyvelich)) +- Fix path for full config doc [#800](https://github.com/jupyter-server/jupyter_server/pull/800) ([@andreyvelich](https://github.com/andreyvelich)) +- Fix contributing guide for building the docs [#794](https://github.com/jupyter-server/jupyter_server/pull/794) ([@andreyvelich](https://github.com/andreyvelich)) +- Update team meetings doc [#772](https://github.com/jupyter-server/jupyter_server/pull/772) ([@willingc](https://github.com/willingc)) +- Update documentation about registering file save hooks [#770](https://github.com/jupyter-server/jupyter_server/pull/770) ([@davidbrochart](https://github.com/davidbrochart)) + +### Other merged PRs + +- Update index.rst 
[#970](https://github.com/jupyter-server/jupyter_server/pull/970) ([@razrotenberg](https://github.com/razrotenberg)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-09-01&to=2022-09-13&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-09-01..2022-09-13&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-09-01..2022-09-13&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2022-09-01..2022-09-13&type=Issues) | [@epignot](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aepignot+updated%3A2022-09-01..2022-09-13&type=Issues) | [@krassowski](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akrassowski+updated%3A2022-09-01..2022-09-13&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-09-01..2022-09-13&type=Issues) | [@razrotenberg](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Arazrotenberg+updated%3A2022-09-01..2022-09-13&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-09-01..2022-09-13&type=Issues) | [@wjsi](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awjsi+updated%3A2022-09-01..2022-09-13&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-09-01..2022-09-13&type=Issues) + +## 
2.0.0b1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0b0...644540b4128e8295e5cedf75e7d7d1c04ba9b3ea)) + +### Enhancements made + +- Emit events from the Contents Service [#954](https://github.com/jupyter-server/jupyter_server/pull/954) ([@Zsailer](https://github.com/Zsailer)) +- Retry certain errors between server and gateway [#944](https://github.com/jupyter-server/jupyter_server/pull/944) ([@kevin-bates](https://github.com/kevin-bates)) +- Allow new file types [#895](https://github.com/jupyter-server/jupyter_server/pull/895) ([@davidbrochart](https://github.com/davidbrochart)) +- Adds anonymous users [#863](https://github.com/jupyter-server/jupyter_server/pull/863) ([@hbcarlos](https://github.com/hbcarlos)) +- switch to jupyter_events [#862](https://github.com/jupyter-server/jupyter_server/pull/862) ([@Zsailer](https://github.com/Zsailer)) + +### Bugs fixed + +- Fix bug in `api/contents` requests for an allowed copy [#939](https://github.com/jupyter-server/jupyter_server/pull/939) ([@kiersten-stokes](https://github.com/kiersten-stokes)) +- Fix error that prevents posting to `api/contents` endpoint with no body [#937](https://github.com/jupyter-server/jupyter_server/pull/937) ([@kiersten-stokes](https://github.com/kiersten-stokes)) +- avoid creating asyncio.Lock at import time [#935](https://github.com/jupyter-server/jupyter_server/pull/935) ([@minrk](https://github.com/minrk)) +- Fix `get_kernel_path` for `AsyncFileManager`s. 
[#929](https://github.com/jupyter-server/jupyter_server/pull/929) ([@thetorpedodog](https://github.com/thetorpedodog)) +- Check for serverapp for reraise flag [#887](https://github.com/jupyter-server/jupyter_server/pull/887) ([@vidartf](https://github.com/vidartf)) + +### Maintenance and upkeep improvements + +- Update pytest_plugin with fixtures to test auth in core and extensions [#956](https://github.com/jupyter-server/jupyter_server/pull/956) ([@akshaychitneni](https://github.com/akshaychitneni)) +- \[pre-commit.ci\] pre-commit autoupdate [#955](https://github.com/jupyter-server/jupyter_server/pull/955) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Fix docs build [#952](https://github.com/jupyter-server/jupyter_server/pull/952) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#945](https://github.com/jupyter-server/jupyter_server/pull/945) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- \[pre-commit.ci\] pre-commit autoupdate [#942](https://github.com/jupyter-server/jupyter_server/pull/942) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Fix flake8 v5 compat [#941](https://github.com/jupyter-server/jupyter_server/pull/941) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#938](https://github.com/jupyter-server/jupyter_server/pull/938) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- \[pre-commit.ci\] pre-commit autoupdate [#928](https://github.com/jupyter-server/jupyter_server/pull/928) ([@pre-commit-ci](https://github.com/pre-commit-ci)) + +### Documentation improvements + +- Fix typo in IdentityProvider documentation 
[#915](https://github.com/jupyter-server/jupyter_server/pull/915) ([@danielyahn](https://github.com/danielyahn)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-07-14&to=2022-09-01&type=c)) + +[@akshaychitneni](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aakshaychitneni+updated%3A2022-07-14..2022-09-01&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-07-14..2022-09-01&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-07-14..2022-09-01&type=Issues) | [@danielyahn](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adanielyahn+updated%3A2022-07-14..2022-09-01&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2022-07-14..2022-09-01&type=Issues) | [@dlqqq](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adlqqq+updated%3A2022-07-14..2022-09-01&type=Issues) | [@hbcarlos](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ahbcarlos+updated%3A2022-07-14..2022-09-01&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2022-07-14..2022-09-01&type=Issues) | [@kiersten-stokes](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akiersten-stokes+updated%3A2022-07-14..2022-09-01&type=Issues) | 
[@meeseeksdev](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ameeseeksdev+updated%3A2022-07-14..2022-09-01&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2022-07-14..2022-09-01&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-07-14..2022-09-01&type=Issues) | [@thetorpedodog](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Athetorpedodog+updated%3A2022-07-14..2022-09-01&type=Issues) | [@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2022-07-14..2022-09-01&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-07-14..2022-09-01&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-07-14..2022-09-01&type=Issues) + +## 2.0.0b0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0a2...cb9c5edff77c2146d0e66425a82c7b6125b5039e)) + +### Enhancements made + +- Make it easier for extensions to customize the ServerApp [#879](https://github.com/jupyter-server/jupyter_server/pull/879) ([@minrk](https://github.com/minrk)) +- consolidate auth config on IdentityProvider [#825](https://github.com/jupyter-server/jupyter_server/pull/825) ([@minrk](https://github.com/minrk)) + +### Bugs fixed + +- Fix c.GatewayClient.url snippet syntax [#917](https://github.com/jupyter-server/jupyter_server/pull/917) ([@rickwierenga](https://github.com/rickwierenga)) +- Add back support for kernel launch timeout pad 
[#910](https://github.com/jupyter-server/jupyter_server/pull/910) ([@CiprianAnton](https://github.com/CiprianAnton)) + +### Maintenance and upkeep improvements + +- Improve logging of bare exceptions and other cleanups. [#922](https://github.com/jupyter-server/jupyter_server/pull/922) ([@thetorpedodog](https://github.com/thetorpedodog)) +- Use more explicit version template for pyproject [#919](https://github.com/jupyter-server/jupyter_server/pull/919) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#916](https://github.com/jupyter-server/jupyter_server/pull/916) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Fix handling of dev version [#913](https://github.com/jupyter-server/jupyter_server/pull/913) ([@blink1073](https://github.com/blink1073)) +- Fix owasp link [#908](https://github.com/jupyter-server/jupyter_server/pull/908) ([@blink1073](https://github.com/blink1073)) +- default to system node version in precommit [#906](https://github.com/jupyter-server/jupyter_server/pull/906) ([@dlqqq](https://github.com/dlqqq)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-07-05&to=2022-07-14&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-07-05..2022-07-14&type=Issues) | [@CiprianAnton](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACiprianAnton+updated%3A2022-07-05..2022-07-14&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-07-05..2022-07-14&type=Issues) 
| [@dlqqq](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adlqqq+updated%3A2022-07-05..2022-07-14&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2022-07-05..2022-07-14&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-07-05..2022-07-14&type=Issues) | [@rickwierenga](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Arickwierenga+updated%3A2022-07-05..2022-07-14&type=Issues) | [@thetorpedodog](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Athetorpedodog+updated%3A2022-07-05..2022-07-14&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-07-05..2022-07-14&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-07-05..2022-07-14&type=Issues) + +## 2.0.0a2 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0a1...bd1e7d70b64716097c6b064b2fd5dc67e23d2320)) + +### Enhancements made + +- Show import error when failing to load an extension [#878](https://github.com/jupyter-server/jupyter_server/pull/878) ([@minrk](https://github.com/minrk)) + +### Bugs fixed + +- Notify ChannelQueue that the response router thread is finishing [#896](https://github.com/jupyter-server/jupyter_server/pull/896) ([@CiprianAnton](https://github.com/CiprianAnton)) +- Make ChannelQueue.get_msg true async [#892](https://github.com/jupyter-server/jupyter_server/pull/892) ([@CiprianAnton](https://github.com/CiprianAnton)) +- Fix gateway kernel shutdown 
[#874](https://github.com/jupyter-server/jupyter_server/pull/874) ([@kevin-bates](https://github.com/kevin-bates)) + +### Maintenance and upkeep improvements + +- \[pre-commit.ci\] pre-commit autoupdate [#902](https://github.com/jupyter-server/jupyter_server/pull/902) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- \[pre-commit.ci\] pre-commit autoupdate [#894](https://github.com/jupyter-server/jupyter_server/pull/894) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Normalize os_path [#886](https://github.com/jupyter-server/jupyter_server/pull/886) ([@martinRenou](https://github.com/martinRenou)) +- \[pre-commit.ci\] pre-commit autoupdate [#885](https://github.com/jupyter-server/jupyter_server/pull/885) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- suppress tornado deprecation warnings [#882](https://github.com/jupyter-server/jupyter_server/pull/882) ([@minrk](https://github.com/minrk)) +- Fix lint [#867](https://github.com/jupyter-server/jupyter_server/pull/867) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#866](https://github.com/jupyter-server/jupyter_server/pull/866) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Fix sphinx 5.0 support [#865](https://github.com/jupyter-server/jupyter_server/pull/865) ([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- Add changelog for 2.0.0a1 [#870](https://github.com/jupyter-server/jupyter_server/pull/870) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this 
release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-06-07&to=2022-07-05&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-06-07..2022-07-05&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2022-06-07..2022-07-05&type=Issues) | [@CiprianAnton](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACiprianAnton+updated%3A2022-06-07..2022-07-05&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-06-07..2022-07-05&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2022-06-07..2022-07-05&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2022-06-07..2022-07-05&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2022-06-07..2022-07-05&type=Issues) | [@martinRenou](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AmartinRenou+updated%3A2022-06-07..2022-07-05&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2022-06-07..2022-07-05&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-06-07..2022-07-05&type=Issues) + +## 2.0.0a1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.0a0...v2.0.0a1)) + +- Address security advisory
[GHSA-q874-g24w-4q9g](https://github.com/jupyter-server/jupyter_server/security/advisories/GHSA-q874-g24w-4q9g). + +## 2.0.0a0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.16.0...3e64fa5eef7fba9f8e17c30cec688254adf913bd)) + +### New features added + +- Identity API at /api/me [#671](https://github.com/jupyter-server/jupyter_server/pull/671) ([@minrk](https://github.com/minrk)) + +### Enhancements made + +- Add the root_dir value to the logging message in case of non compliant preferred_dir [#804](https://github.com/jupyter-server/jupyter_server/pull/804) ([@echarles](https://github.com/echarles)) +- Hydrate a Kernel Manager when calling GatewayKernelManager.start_kernel with a kernel_id [#788](https://github.com/jupyter-server/jupyter_server/pull/788) ([@Zsailer](https://github.com/Zsailer)) +- Remove terminals in favor of jupyter_server_terminals extension [#651](https://github.com/jupyter-server/jupyter_server/pull/651) ([@Zsailer](https://github.com/Zsailer)) + +### Bugs fixed + +- Defer preferred_dir validation until root_dir is set [#826](https://github.com/jupyter-server/jupyter_server/pull/826) ([@kevin-bates](https://github.com/kevin-bates)) +- missing required arguments in utils.fetch [#798](https://github.com/jupyter-server/jupyter_server/pull/798) ([@minrk](https://github.com/minrk)) + +### Maintenance and upkeep improvements + +- Add license metadata and file [#827](https://github.com/jupyter-server/jupyter_server/pull/827) ([@blink1073](https://github.com/blink1073)) +- CI cleanup [#824](https://github.com/jupyter-server/jupyter_server/pull/824) ([@blink1073](https://github.com/blink1073)) +- Switch to flit 
[#823](https://github.com/jupyter-server/jupyter_server/pull/823) ([@blink1073](https://github.com/blink1073)) +- Remove unused pytest-mock dependency [#814](https://github.com/jupyter-server/jupyter_server/pull/814) ([@mgorny](https://github.com/mgorny)) +- Remove duplicate requests requirement from setup.cfg [#813](https://github.com/jupyter-server/jupyter_server/pull/813) ([@mgorny](https://github.com/mgorny)) +- \[pre-commit.ci\] pre-commit autoupdate [#802](https://github.com/jupyter-server/jupyter_server/pull/802) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Add helper jobs for branch protection [#797](https://github.com/jupyter-server/jupyter_server/pull/797) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#793](https://github.com/jupyter-server/jupyter_server/pull/793) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Centralize app cleanup [#792](https://github.com/jupyter-server/jupyter_server/pull/792) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#785](https://github.com/jupyter-server/jupyter_server/pull/785) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Clean up pre-commit [#782](https://github.com/jupyter-server/jupyter_server/pull/782) ([@blink1073](https://github.com/blink1073)) +- Add mypy check [#779](https://github.com/jupyter-server/jupyter_server/pull/779) ([@blink1073](https://github.com/blink1073)) +- Use new post-version-spec from jupyter_releaser [#777](https://github.com/jupyter-server/jupyter_server/pull/777) ([@blink1073](https://github.com/blink1073)) +- Give write permissions to enforce label workflow 
[#776](https://github.com/jupyter-server/jupyter_server/pull/776) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#775](https://github.com/jupyter-server/jupyter_server/pull/775) ([@pre-commit-ci](https://github.com/pre-commit-ci)) +- Add explicit handling of warnings [#771](https://github.com/jupyter-server/jupyter_server/pull/771) ([@blink1073](https://github.com/blink1073)) +- Use test-sdist from maintainer-tools [#769](https://github.com/jupyter-server/jupyter_server/pull/769) ([@blink1073](https://github.com/blink1073)) +- Add pyupgrade and doc8 hooks [#768](https://github.com/jupyter-server/jupyter_server/pull/768) ([@blink1073](https://github.com/blink1073)) +- update some metadata fields, sort deps [#675](https://github.com/jupyter-server/jupyter_server/pull/675) ([@bollwyvl](https://github.com/bollwyvl)) + +### Documentation improvements + +- Add Session workflows documentation [#808](https://github.com/jupyter-server/jupyter_server/pull/808) ([@andreyvelich](https://github.com/andreyvelich)) +- Add Jupyter Server Architecture diagram [#801](https://github.com/jupyter-server/jupyter_server/pull/801) ([@andreyvelich](https://github.com/andreyvelich)) +- Fix path for full config doc [#800](https://github.com/jupyter-server/jupyter_server/pull/800) ([@andreyvelich](https://github.com/andreyvelich)) +- Fix contributing guide for building the docs [#794](https://github.com/jupyter-server/jupyter_server/pull/794) ([@andreyvelich](https://github.com/andreyvelich)) +- Update team meetings doc [#772](https://github.com/jupyter-server/jupyter_server/pull/772) ([@willingc](https://github.com/willingc)) +- Update 
documentation about registering file save hooks [#770](https://github.com/jupyter-server/jupyter_server/pull/770) ([@davidbrochart](https://github.com/davidbrochart)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-03-29&to=2022-05-03&type=c)) + +[@andreyvelich](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aandreyvelich+updated%3A2022-03-29..2022-05-03&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-03-29..2022-05-03&type=Issues) | [@bollwyvl](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Abollwyvl+updated%3A2022-03-29..2022-05-03&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-03-29..2022-05-03&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2022-03-29..2022-05-03&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2022-03-29..2022-05-03&type=Issues) | [@hbcarlos](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ahbcarlos+updated%3A2022-03-29..2022-05-03&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2022-03-29..2022-05-03&type=Issues) | [@meeseeksdev](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ameeseeksdev+updated%3A2022-03-29..2022-05-03&type=Issues) | 
[@mgorny](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amgorny+updated%3A2022-03-29..2022-05-03&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2022-03-29..2022-05-03&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2022-03-29..2022-05-03&type=Issues) | [@SylvainCorlay](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ASylvainCorlay+updated%3A2022-03-29..2022-05-03&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-03-29..2022-05-03&type=Issues) | [@Wh1isper](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AWh1isper+updated%3A2022-03-29..2022-05-03&type=Issues) | [@willingc](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awillingc+updated%3A2022-03-29..2022-05-03&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-03-29..2022-05-03&type=Issues) + +## 1.17.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.16.0...2b296099777d50aa86f67faf94d5cbfde906b169)) + +### Enhancements made + +- Add the root_dir value to the logging message in case of non compliant preferred_dir [#804](https://github.com/jupyter-server/jupyter_server/pull/804) ([@echarles](https://github.com/echarles)) + +### Bugs fixed + +- missing required arguments in utils.fetch [#798](https://github.com/jupyter-server/jupyter_server/pull/798) ([@minrk](https://github.com/minrk)) + +### Maintenance and upkeep improvements + +- Add helper jobs for branch protection 
[#797](https://github.com/jupyter-server/jupyter_server/pull/797) ([@blink1073](https://github.com/blink1073)) +- \[pre-commit.ci\] pre-commit autoupdate [#793](https://github.com/jupyter-server/jupyter_server/pull/793) ([@pre-commit-ci\[bot\]](https://github.com/apps/pre-commit-ci)) +- Update branch references and links [#791](https://github.com/jupyter-server/jupyter_server/pull/791) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2022-03-29&to=2022-04-27&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-03-29..2022-04-27&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-03-29..2022-04-27&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2022-03-29..2022-04-27&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2022-03-29..2022-04-27&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2022-03-29..2022-04-27&type=Issues) | [@meeseeksdev](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ameeseeksdev+updated%3A2022-03-29..2022-04-27&type=Issues) | [@meeseeksmachine](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ameeseeksmachine+updated%3A2022-03-29..2022-04-27&type=Issues) | 
[@Wh1isper](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AWh1isper+updated%3A2022-03-29..2022-04-27&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-03-29..2022-04-27&type=Issues) + ## 1.16.0 ([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.15.6...d32b887ae2c3b77fe3ae67ba79c3d3c6713c0d8a)) @@ -18,7 +1202,7 @@ All notable changes to this project will be documented in this file. ### Bugs fixed -- Regression in connection URL calcuation in ServerApp [#761](https://github.com/jupyter-server/jupyter_server/pull/761) ([@jhamet93](https://github.com/jhamet93)) +- Regression in connection URL calculation in ServerApp [#761](https://github.com/jupyter-server/jupyter_server/pull/761) ([@jhamet93](https://github.com/jhamet93)) - Include explicit package data [#757](https://github.com/jupyter-server/jupyter_server/pull/757) ([@blink1073](https://github.com/blink1073)) - Ensure terminal cwd exists [#755](https://github.com/jupyter-server/jupyter_server/pull/755) ([@fcollonval](https://github.com/fcollonval)) - make 'cwd' param for TerminalManager absolute [#749](https://github.com/jupyter-server/jupyter_server/pull/749) ([@rccern](https://github.com/rccern)) @@ -46,8 +1230,6 @@ All notable changes to this project will be documented in this file. 
[@andreyvelich](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aandreyvelich+updated%3A2022-03-16..2022-03-29&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2022-03-16..2022-03-29&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2022-03-16..2022-03-29&type=Issues) | [@divyansshhh](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adivyansshhh+updated%3A2022-03-16..2022-03-29&type=Issues) | [@dleen](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adleen+updated%3A2022-03-16..2022-03-29&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Afcollonval+updated%3A2022-03-16..2022-03-29&type=Issues) | [@jhamet93](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajhamet93+updated%3A2022-03-16..2022-03-29&type=Issues) | [@meeseeksdev](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ameeseeksdev+updated%3A2022-03-16..2022-03-29&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2022-03-16..2022-03-29&type=Issues) | [@rccern](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Arccern+updated%3A2022-03-16..2022-03-29&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2022-03-16..2022-03-29&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2022-03-16..2022-03-29&type=Issues) - - ## 1.15.6 ([Full 
Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.15.5...7fbaa767c71302cc756bdf1fb11fcbf1b3768dcc)) @@ -130,7 +1312,7 @@ All notable changes to this project will be documented in this file. ### Bugs fixed -- Revert "Re-use ServerApp.config_file_paths for consistency (#715)" [#728](https://github.com/jupyter-server/jupyter_server/pull/728) ([@blink1073](https://github.com/blink1073)) +- Revert "Reuse ServerApp.config_file_paths for consistency (#715)" [#728](https://github.com/jupyter-server/jupyter_server/pull/728) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release @@ -155,7 +1337,7 @@ All notable changes to this project will be documented in this file. - Implement Required Methods in Async Manner [#721](https://github.com/jupyter-server/jupyter_server/pull/721) ([@jhamet93](https://github.com/jhamet93)) - Call pre_save_hook only on first chunk of large files [#716](https://github.com/jupyter-server/jupyter_server/pull/716) ([@davidbrochart](https://github.com/davidbrochart)) -- Re-use ServerApp.config_file_paths for consistency [#715](https://github.com/jupyter-server/jupyter_server/pull/715) ([@minrk](https://github.com/minrk)) +- Reuse ServerApp.config_file_paths for consistency [#715](https://github.com/jupyter-server/jupyter_server/pull/715) ([@minrk](https://github.com/minrk)) - serverapp: Use .absolute() instead of .resolve() for symlinks [#712](https://github.com/jupyter-server/jupyter_server/pull/712) ([@EricCousineau-TRI](https://github.com/EricCousineau-TRI)) - Fall back to legacy protocol if selected_subprotocol raises exception [#706](https://github.com/jupyter-server/jupyter_server/pull/706) ([@davidbrochart](https://github.com/davidbrochart)) - Fix FilesHandler not meet 
RFC 6713 [#701](https://github.com/jupyter-server/jupyter_server/pull/701) ([@Wh1isper](https://github.com/Wh1isper)) @@ -360,7 +1542,7 @@ All notable changes to this project will be documented in this file. ### Bugs fixed -- Fix \s deprecation warning [#600](https://github.com/jupyter-server/jupyter_server/pull/600) ([@Zsailer](https://github.com/Zsailer)) +- Fix \\s deprecation warning [#600](https://github.com/jupyter-server/jupyter_server/pull/600) ([@Zsailer](https://github.com/Zsailer)) - Remove requests-unixsocket dependency [#599](https://github.com/jupyter-server/jupyter_server/pull/599) ([@kevin-bates](https://github.com/kevin-bates)) - bugfix: dir_exists is never awaited [#597](https://github.com/jupyter-server/jupyter_server/pull/597) ([@stdll00](https://github.com/stdll00)) - Fix missing await when call 'async_replace_file' [#595](https://github.com/jupyter-server/jupyter_server/pull/595) ([@Wh1isper](https://github.com/Wh1isper)) @@ -384,7 +1566,7 @@ All notable changes to this project will be documented in this file. ### Bugs fixed - Do not log connection error if the kernel is already shutdown [#584](https://github.com/jupyter-server/jupyter_server/pull/584) ([@martinRenou](https://github.com/martinRenou)) -- [BUG]: allow None for min_open_files_limit trait [#587](https://github.com/jupyter-server/jupyter_server/pull/587) ([@Zsailer](https://github.com/Zsailer)) +- \[BUG\]: allow None for min_open_files_limit trait [#587](https://github.com/jupyter-server/jupyter_server/pull/587) ([@Zsailer](https://github.com/Zsailer)) ### Contributors to this release @@ -491,7 +1673,7 @@ All notable changes to this project will be documented in this file. 
- enable a way to run a task when an io_loop is created [#531](https://github.com/jupyter-server/jupyter_server/pull/531) ([@eastonsuo](https://github.com/eastonsuo)) - adds `GatewayClient.auth_scheme` configurable [#529](https://github.com/jupyter-server/jupyter_server/pull/529) ([@telamonian](https://github.com/telamonian)) -- [Notebook port 4835] Add UNIX socket support to notebook server [#525](https://github.com/jupyter-server/jupyter_server/pull/525) ([@jtpio](https://github.com/jtpio)) +- \[Notebook port 4835\] Add UNIX socket support to notebook server [#525](https://github.com/jupyter-server/jupyter_server/pull/525) ([@jtpio](https://github.com/jtpio)) ### Bugs fixed @@ -575,7 +1757,7 @@ All notable changes to this project will be documented in this file. ([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-04-22&to=2021-05-10&type=c)) [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-05-06..2021-05-10&type=Issues) | [@hMED22](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AhMED22+updated%3A2021-05-06..2021-05-10&type=Issues) | [@jtpio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajtpio+updated%3A2021-05-06..2021-05-10&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-05-06..2021-05-10&type=Issues) | [@the-higgs](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Athe-higgs+updated%3A2021-05-06..2021-05-10&type=Issues) | 
[@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2021-05-06..2021-05-10&type=Issues) -[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-05-01..2021-05-05&type=Issues) | [@candlerb](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acandlerb+updated%3A2021-05-01..2021-05-05&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-05-01..2021-05-05&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2021-05-01..2021-05-05&type=Issues) | [@mwakaba2](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amwakaba2+updated%3A2021-05-01..2021-05-05&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-05-01..2021-05-05&type=Issues) | [@kiendang](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akiendang+updated%3A2021-04-21..2021-05-01&type=Issues) | [@Carreau] +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-05-01..2021-05-05&type=Issues) | [@candlerb](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acandlerb+updated%3A2021-05-01..2021-05-05&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-05-01..2021-05-05&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2021-05-01..2021-05-05&type=Issues) | 
[@mwakaba2](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amwakaba2+updated%3A2021-05-01..2021-05-05&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-05-01..2021-05-05&type=Issues) | [@kiendang](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akiendang+updated%3A2021-04-21..2021-05-01&type=Issues) | \[@Carreau\] (https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2021-04-21..2021-05-01&type=Issues) ## 1.6.4 @@ -741,15 +1923,15 @@ All notable changes to this project will be documented in this file. **Merged pull requests:** -- Add Tests to Distribution [\#416](https://github.com/jupyter-server/jupyter_server/pull/416) ([afshin](https://github.com/afshin)) -- Enable extensions to control the file_to_run [\#415](https://github.com/jupyter-server/jupyter_server/pull/415) ([afshin](https://github.com/afshin)) -- add missing template for view.html [\#414](https://github.com/jupyter-server/jupyter_server/pull/414) ([minrk](https://github.com/minrk)) -- Remove obsoleted asyncio-patch fixture [\#412](https://github.com/jupyter-server/jupyter_server/pull/412) ([kevin-bates](https://github.com/kevin-bates)) -- Emit deprecation warning on old name [\#411](https://github.com/jupyter-server/jupyter_server/pull/411) ([fcollonval](https://github.com/fcollonval)) -- Correct logging message position [\#410](https://github.com/jupyter-server/jupyter_server/pull/410) ([fcollonval](https://github.com/fcollonval)) -- Update 1.3.0 Changelog to include broken 1.2.3 PRs [\#408](https://github.com/jupyter-server/jupyter_server/pull/408) 
([kevin-bates](https://github.com/kevin-bates)) -- \[Gateway\] Track only this server's kernels [\#407](https://github.com/jupyter-server/jupyter_server/pull/407) ([kevin-bates](https://github.com/kevin-bates)) -- Update manager.py: more descriptive warnings when extensions fail to load [\#396](https://github.com/jupyter-server/jupyter_server/pull/396) ([alberti42](https://github.com/alberti42)) +- Add Tests to Distribution [#416](https://github.com/jupyter-server/jupyter_server/pull/416) ([afshin](https://github.com/afshin)) +- Enable extensions to control the file_to_run [#415](https://github.com/jupyter-server/jupyter_server/pull/415) ([afshin](https://github.com/afshin)) +- add missing template for view.html [#414](https://github.com/jupyter-server/jupyter_server/pull/414) ([minrk](https://github.com/minrk)) +- Remove obsoleted asyncio-patch fixture [#412](https://github.com/jupyter-server/jupyter_server/pull/412) ([kevin-bates](https://github.com/kevin-bates)) +- Emit deprecation warning on old name [#411](https://github.com/jupyter-server/jupyter_server/pull/411) ([fcollonval](https://github.com/fcollonval)) +- Correct logging message position [#410](https://github.com/jupyter-server/jupyter_server/pull/410) ([fcollonval](https://github.com/fcollonval)) +- Update 1.3.0 Changelog to include broken 1.2.3 PRs [#408](https://github.com/jupyter-server/jupyter_server/pull/408) ([kevin-bates](https://github.com/kevin-bates)) +- \[Gateway\] Track only this server's kernels [#407](https://github.com/jupyter-server/jupyter_server/pull/407) ([kevin-bates](https://github.com/kevin-bates)) +- Update manager.py: more descriptive warnings when extensions fail to load 
[#396](https://github.com/jupyter-server/jupyter_server/pull/396) ([alberti42](https://github.com/alberti42)) ## [1.3.0](https://github.com/jupyter-server/jupyter_server/tree/1.3.0) (2021-02-04) @@ -757,15 +1939,15 @@ All notable changes to this project will be documented in this file. **Merged pull requests (includes those from broken 1.2.3 release):** -- Special case ExtensionApp that starts the ServerApp [\#401](https://github.com/jupyter-server/jupyter_server/pull/401) ([afshin](https://github.com/afshin)) -- only use deprecated notebook_dir config if root_dir is not set [\#400](https://github.com/jupyter-server/jupyter_server/pull/400) ([minrk](https://github.com/minrk)) -- Use async kernel manager by default [\#399](https://github.com/jupyter-server/jupyter_server/pull/399) ([kevin-bates](https://github.com/kevin-bates)) -- Revert Session.username default value change [\#398](https://github.com/jupyter-server/jupyter_server/pull/398) ([mwakaba2](https://github.com/mwakaba2)) -- Re-enable default_url in ExtensionApp [\#393](https://github.com/jupyter-server/jupyter_server/pull/393) ([afshin](https://github.com/afshin)) -- Enable notebook ContentsManager in jupyter_server [\#392](https://github.com/jupyter-server/jupyter_server/pull/392) ([afshin](https://github.com/afshin)) -- Use jupyter_server_config.json as config file in the update password api [\#390](https://github.com/jupyter-server/jupyter_server/pull/390) ([echarles](https://github.com/echarles)) -- Increase culling test idle timeout [\#388](https://github.com/jupyter-server/jupyter_server/pull/388) ([kevin-bates](https://github.com/kevin-bates)) -- update changelog for 1.2.2 
[\#387](https://github.com/jupyter-server/jupyter_server/pull/387) ([Zsailer](https://github.com/Zsailer)) +- Special case ExtensionApp that starts the ServerApp [#401](https://github.com/jupyter-server/jupyter_server/pull/401) ([afshin](https://github.com/afshin)) +- only use deprecated notebook_dir config if root_dir is not set [#400](https://github.com/jupyter-server/jupyter_server/pull/400) ([minrk](https://github.com/minrk)) +- Use async kernel manager by default [#399](https://github.com/jupyter-server/jupyter_server/pull/399) ([kevin-bates](https://github.com/kevin-bates)) +- Revert Session.username default value change [#398](https://github.com/jupyter-server/jupyter_server/pull/398) ([mwakaba2](https://github.com/mwakaba2)) +- Re-enable default_url in ExtensionApp [#393](https://github.com/jupyter-server/jupyter_server/pull/393) ([afshin](https://github.com/afshin)) +- Enable notebook ContentsManager in jupyter_server [#392](https://github.com/jupyter-server/jupyter_server/pull/392) ([afshin](https://github.com/afshin)) +- Use jupyter_server_config.json as config file in the update password api [#390](https://github.com/jupyter-server/jupyter_server/pull/390) ([echarles](https://github.com/echarles)) +- Increase culling test idle timeout [#388](https://github.com/jupyter-server/jupyter_server/pull/388) ([kevin-bates](https://github.com/kevin-bates)) +- update changelog for 1.2.2 [#387](https://github.com/jupyter-server/jupyter_server/pull/387) ([Zsailer](https://github.com/Zsailer)) ## [1.2.3](https://github.com/jupyter-server/jupyter_server/tree/1.2.3) (2021-01-29) @@ -775,21 +1957,21 @@ This was a broken release and was yanked from PyPI. 
**Merged pull requests:** -- Re-enable default_url in ExtensionApp [\#393](https://github.com/jupyter-server/jupyter_server/pull/393) ([afshin](https://github.com/afshin)) -- Enable notebook ContentsManager in jupyter_server [\#392](https://github.com/jupyter-server/jupyter_server/pull/392) ([afshin](https://github.com/afshin)) -- Use jupyter_server_config.json as config file in the update password api [\#390](https://github.com/jupyter-server/jupyter_server/pull/390) ([echarles](https://github.com/echarles)) -- Increase culling test idle timeout [\#388](https://github.com/jupyter-server/jupyter_server/pull/388) ([kevin-bates](https://github.com/kevin-bates)) -- update changelog for 1.2.2 [\#387](https://github.com/jupyter-server/jupyter_server/pull/387) ([Zsailer](https://github.com/Zsailer)) +- Re-enable default_url in ExtensionApp [#393](https://github.com/jupyter-server/jupyter_server/pull/393) ([afshin](https://github.com/afshin)) +- Enable notebook ContentsManager in jupyter_server [#392](https://github.com/jupyter-server/jupyter_server/pull/392) ([afshin](https://github.com/afshin)) +- Use jupyter_server_config.json as config file in the update password api [#390](https://github.com/jupyter-server/jupyter_server/pull/390) ([echarles](https://github.com/echarles)) +- Increase culling test idle timeout [#388](https://github.com/jupyter-server/jupyter_server/pull/388) ([kevin-bates](https://github.com/kevin-bates)) +- update changelog for 1.2.2 [#387](https://github.com/jupyter-server/jupyter_server/pull/387) ([Zsailer](https://github.com/Zsailer)) ## [1.2.2](https://github.com/jupyter-server/jupyter_server/tree/1.2.2) (2021-01-14) **Merged pull requests:** -- Apply 
missing ensure_async to root session handler methods [\#386](https://github.com/jupyter-server/jupyter_server/pull/386) ([kevin-bates](https://github.com/kevin-bates)) -- Update changelog to 1.2.1 [\#385](https://github.com/jupyter-server/jupyter_server/pull/385) ([Zsailer](https://github.com/Zsailer)) -- Fix application exit [\#384](https://github.com/jupyter-server/jupyter_server/pull/384) ([afshin](https://github.com/afshin)) -- Replace secure_write, is_hidden, exists with jupyter_core's [\#382](https://github.com/jupyter-server/jupyter_server/pull/382) ([kevin-bates](https://github.com/kevin-bates)) -- Add --autoreload flag [\#380](https://github.com/jupyter-server/jupyter_server/pull/380) ([afshin](https://github.com/afshin)) +- Apply missing ensure_async to root session handler methods [#386](https://github.com/jupyter-server/jupyter_server/pull/386) ([kevin-bates](https://github.com/kevin-bates)) +- Update changelog to 1.2.1 [#385](https://github.com/jupyter-server/jupyter_server/pull/385) ([Zsailer](https://github.com/Zsailer)) +- Fix application exit [#384](https://github.com/jupyter-server/jupyter_server/pull/384) ([afshin](https://github.com/afshin)) +- Replace secure_write, is_hidden, exists with jupyter_core's [#382](https://github.com/jupyter-server/jupyter_server/pull/382) ([kevin-bates](https://github.com/kevin-bates)) +- Add --autoreload flag [#380](https://github.com/jupyter-server/jupyter_server/pull/380) ([afshin](https://github.com/afshin)) ## [1.2.1](https://github.com/jupyter-server/jupyter_server/tree/1.2.1) (2021-01-08) @@ -797,8 +1979,8 @@ This was a broken release and was yanked from PyPI. 
**Merged pull requests:** -- Enable extensions to set debug and open-browser flags [\#379](https://github.com/jupyter-server/jupyter_server/pull/379) ([afshin](https://github.com/afshin)) -- Add reconnection to Gateway [\#378](https://github.com/jupyter-server/jupyter_server/pull/378) ([oyvsyo](https://github.com/oyvsyo)) +- Enable extensions to set debug and open-browser flags [#379](https://github.com/jupyter-server/jupyter_server/pull/379) ([afshin](https://github.com/afshin)) +- Add reconnection to Gateway [#378](https://github.com/jupyter-server/jupyter_server/pull/378) ([oyvsyo](https://github.com/oyvsyo)) ## [1.2.0](https://github.com/jupyter-server/jupyter_server/tree/1.2.0) (2021-01-07) @@ -806,11 +1988,11 @@ This was a broken release and was yanked from PyPI. **Merged pull requests:** -- Flip default value for open_browser in extensions [\#377](https://github.com/jupyter-server/jupyter_server/pull/377) ([ajbozarth](https://github.com/ajbozarth)) -- Improve Handling of the soft limit on open file handles [\#376](https://github.com/jupyter-server/jupyter_server/pull/376) ([afshin](https://github.com/afshin)) -- Handle open_browser trait in ServerApp and ExtensionApp differently [\#375](https://github.com/jupyter-server/jupyter_server/pull/375) ([afshin](https://github.com/afshin)) -- Add setting to disable redirect file browser launch [\#374](https://github.com/jupyter-server/jupyter_server/pull/374) ([afshin](https://github.com/afshin)) -- Make trust handle use ensure_async [\#373](https://github.com/jupyter-server/jupyter_server/pull/373) ([vidartf](https://github.com/vidartf)) +- Flip default value for open_browser in extensions 
[#377](https://github.com/jupyter-server/jupyter_server/pull/377) ([ajbozarth](https://github.com/ajbozarth)) +- Improve Handling of the soft limit on open file handles [#376](https://github.com/jupyter-server/jupyter_server/pull/376) ([afshin](https://github.com/afshin)) +- Handle open_browser trait in ServerApp and ExtensionApp differently [#375](https://github.com/jupyter-server/jupyter_server/pull/375) ([afshin](https://github.com/afshin)) +- Add setting to disable redirect file browser launch [#374](https://github.com/jupyter-server/jupyter_server/pull/374) ([afshin](https://github.com/afshin)) +- Make trust handle use ensure_async [#373](https://github.com/jupyter-server/jupyter_server/pull/373) ([vidartf](https://github.com/vidartf)) ## [1.1.4](https://github.com/jupyter-server/jupyter_server/tree/1.1.4) (2021-01-04) @@ -818,10 +2000,10 @@ This was a broken release and was yanked from PyPI. 
**Merged pull requests:** -- Update the link to paths documentation [\#371](https://github.com/jupyter-server/jupyter_server/pull/371) ([krassowski](https://github.com/krassowski)) -- IPythonHandler -\> JupyterHandler [\#370](https://github.com/jupyter-server/jupyter_server/pull/370) ([krassowski](https://github.com/krassowski)) -- use setuptools find_packages, exclude tests, docs and examples from dist [\#368](https://github.com/jupyter-server/jupyter_server/pull/368) ([bollwyvl](https://github.com/bollwyvl)) -- Update serverapp.py [\#367](https://github.com/jupyter-server/jupyter_server/pull/367) ([michaelaye](https://github.com/michaelaye)) +- Update the link to paths documentation [#371](https://github.com/jupyter-server/jupyter_server/pull/371) ([krassowski](https://github.com/krassowski)) +- IPythonHandler -> JupyterHandler [#370](https://github.com/jupyter-server/jupyter_server/pull/370) ([krassowski](https://github.com/krassowski)) +- use setuptools find_packages, exclude tests, docs and examples from dist [#368](https://github.com/jupyter-server/jupyter_server/pull/368) ([bollwyvl](https://github.com/bollwyvl)) +- Update serverapp.py [#367](https://github.com/jupyter-server/jupyter_server/pull/367) ([michaelaye](https://github.com/michaelaye)) ## [1.1.3](https://github.com/jupyter-server/jupyter_server/tree/1.1.3) (2020-12-23) @@ -829,7 +2011,7 @@ This was a broken release and was yanked from PyPI. 
**Merged pull requests:** -- Culling: ensure last_activity attr exists before use [\#365](https://github.com/jupyter-server/jupyter_server/pull/365) ([afshin](https://github.com/afshin)) +- Culling: ensure last_activity attr exists before use [#365](https://github.com/jupyter-server/jupyter_server/pull/365) ([afshin](https://github.com/afshin)) ## [1.1.2](https://github.com/jupyter-server/jupyter_server/tree/1.1.2) (2020-12-21) @@ -837,7 +2019,7 @@ This was a broken release and was yanked from PyPI. **Merged pull requests:** -- Nudge kernel with info request until we receive IOPub messages [\#361](https://github.com/jupyter-server/jupyter_server/pull/361) ([SylvainCorlay](https://github.com/SylvainCorlay)) +- Nudge kernel with info request until we receive IOPub messages [#361](https://github.com/jupyter-server/jupyter_server/pull/361) ([SylvainCorlay](https://github.com/SylvainCorlay)) ## [1.1.1](https://github.com/jupyter-server/jupyter_server/tree/1.1.1) (2020-12-16) @@ -845,7 +2027,7 @@ This was a broken release and was yanked from PyPI. **Merged pull requests:** -- Fix: await possible async dir_exists method [\#363](https://github.com/jupyter-server/jupyter_server/pull/363) ([mwakaba2](https://github.com/mwakaba2)) +- Fix: await possible async dir_exists method [#363](https://github.com/jupyter-server/jupyter_server/pull/363) ([mwakaba2](https://github.com/mwakaba2)) ## 1.1.0 (2020-12-11) @@ -853,20 +2035,20 @@ This was a broken release and was yanked from PyPI. 
**Merged pull requests:** -- Restore pytest plugin from pytest-jupyter [\#360](https://github.com/jupyter-server/jupyter_server/pull/360) ([kevin-bates](https://github.com/kevin-bates)) -- Fix upgrade packaging dependencies build step [\#354](https://github.com/jupyter-server/jupyter_server/pull/354) ([mwakaba2](https://github.com/mwakaba2)) -- Await \_connect and inline read_messages callback to \_connect [\#350](https://github.com/jupyter-server/jupyter_server/pull/350) ([ricklamers](https://github.com/ricklamers)) -- Update release instructions and dev version [\#348](https://github.com/jupyter-server/jupyter_server/pull/348) ([kevin-bates](https://github.com/kevin-bates)) -- Fix test_trailing_slash [\#346](https://github.com/jupyter-server/jupyter_server/pull/346) ([kevin-bates](https://github.com/kevin-bates)) -- Apply security advisory fix to master [\#345](https://github.com/jupyter-server/jupyter_server/pull/345) ([kevin-bates](https://github.com/kevin-bates)) -- Allow toggling auth for prometheus metrics [\#344](https://github.com/jupyter-server/jupyter_server/pull/344) ([yuvipanda](https://github.com/yuvipanda)) -- Port Notebook PRs 5565 and 5588 - terminal shell heuristics [\#343](https://github.com/jupyter-server/jupyter_server/pull/343) ([kevin-bates](https://github.com/kevin-bates)) -- Port gateway updates from notebook \(PRs 5317 and 5484\) [\#341](https://github.com/jupyter-server/jupyter_server/pull/341) ([kevin-bates](https://github.com/kevin-bates)) -- add check_origin handler to gateway WebSocketChannelsHandler [\#340](https://github.com/jupyter-server/jupyter_server/pull/340) ([ricklamers](https://github.com/ricklamers)) -- Remove pytest11 entrypoint and plugin, require 
tornado 6.1, remove asyncio patch, CI work [\#339](https://github.com/jupyter-server/jupyter_server/pull/339) ([bollwyvl](https://github.com/bollwyvl)) -- Switch fixtures to use those in pytest-jupyter to avoid collisions [\#335](https://github.com/jupyter-server/jupyter_server/pull/335) ([kevin-bates](https://github.com/kevin-bates)) -- Enable CodeQL runs on all pushed branches [\#333](https://github.com/jupyter-server/jupyter_server/pull/333) ([kevin-bates](https://github.com/kevin-bates)) -- Asynchronous Contents API [\#324](https://github.com/jupyter-server/jupyter_server/pull/324) ([mwakaba2](https://github.com/mwakaba2)) +- Restore pytest plugin from pytest-jupyter [#360](https://github.com/jupyter-server/jupyter_server/pull/360) ([kevin-bates](https://github.com/kevin-bates)) +- Fix upgrade packaging dependencies build step [#354](https://github.com/jupyter-server/jupyter_server/pull/354) ([mwakaba2](https://github.com/mwakaba2)) +- Await \_connect and inline read_messages callback to \_connect [#350](https://github.com/jupyter-server/jupyter_server/pull/350) ([ricklamers](https://github.com/ricklamers)) +- Update release instructions and dev version [#348](https://github.com/jupyter-server/jupyter_server/pull/348) ([kevin-bates](https://github.com/kevin-bates)) +- Fix test_trailing_slash [#346](https://github.com/jupyter-server/jupyter_server/pull/346) ([kevin-bates](https://github.com/kevin-bates)) +- Apply security advisory fix to master [#345](https://github.com/jupyter-server/jupyter_server/pull/345) ([kevin-bates](https://github.com/kevin-bates)) +- Allow toggling auth for prometheus metrics [#344](https://github.com/jupyter-server/jupyter_server/pull/344) 
([yuvipanda](https://github.com/yuvipanda)) +- Port Notebook PRs 5565 and 5588 - terminal shell heuristics [#343](https://github.com/jupyter-server/jupyter_server/pull/343) ([kevin-bates](https://github.com/kevin-bates)) +- Port gateway updates from notebook (PRs 5317 and 5484) [#341](https://github.com/jupyter-server/jupyter_server/pull/341) ([kevin-bates](https://github.com/kevin-bates)) +- add check_origin handler to gateway WebSocketChannelsHandler [#340](https://github.com/jupyter-server/jupyter_server/pull/340) ([ricklamers](https://github.com/ricklamers)) +- Remove pytest11 entrypoint and plugin, require tornado 6.1, remove asyncio patch, CI work [#339](https://github.com/jupyter-server/jupyter_server/pull/339) ([bollwyvl](https://github.com/bollwyvl)) +- Switch fixtures to use those in pytest-jupyter to avoid collisions [#335](https://github.com/jupyter-server/jupyter_server/pull/335) ([kevin-bates](https://github.com/kevin-bates)) +- Enable CodeQL runs on all pushed branches [#333](https://github.com/jupyter-server/jupyter_server/pull/333) ([kevin-bates](https://github.com/kevin-bates)) +- Asynchronous Contents API [#324](https://github.com/jupyter-server/jupyter_server/pull/324) ([mwakaba2](https://github.com/mwakaba2)) ## 1.0.6 (2020-11-18) @@ -902,17 +2084,17 @@ This was a broken release and was yanked from PyPI. - Prevent a re-definition of prometheus metrics if `notebook` package already imports them. ([#210](https://github.com/jupyter/jupyter_server/pull/210)) - Fixed `terminals` REST API unit tests that weren't shutting down properly. ([221](https://github.com/jupyter/jupyter_server/pull/221)) -- Fixed jupyter_server on Windows for Python < 3.7. Added patch to handle subprocess cleanup. 
([240](https://github.com/jupyter/jupyter_server/pull/240)) +- Fixed jupyter_server on Windows for Python \< 3.7. Added patch to handle subprocess cleanup. ([240](https://github.com/jupyter/jupyter_server/pull/240)) - `base_url` was being duplicated when getting a url path from the `ServerApp`. ([280](https://github.com/jupyter/jupyter_server/pull/280)) - Extension URLs are now properly prefixed with `base_url`. Previously, all `static` paths were not. ([285](https://github.com/jupyter/jupyter_server/pull/285)) - Changed ExtensionApp mixin to inherit from `HasTraits`. This broke in traitlets 5.0 ([294](https://github.com/jupyter/jupyter_server/pull/294)) - Replaces `urlparse` with `url_path_join` to prevent URL squashing issues. ([304](https://github.com/jupyter/jupyter_server/pull/304)) -## [0.3] - 2020-4-22 +## \[0.3\] - 2020-4-22 ### Added -- ([#191](https://github.com/jupyter/jupyter_server/pull/191)) Async kernel managment is now possible using the `AsyncKernelManager` from `jupyter_client` +- ([#191](https://github.com/jupyter/jupyter_server/pull/191)) Async kernel management is now possible using the `AsyncKernelManager` from `jupyter_client` - ([#201](https://github.com/jupyter/jupyter_server/pull/201)) Parameters can now be passed to new terminals created by the `terminals` REST API. ### Changed @@ -924,7 +2106,7 @@ This was a broken release and was yanked from PyPI. - ([#194](https://github.com/jupyter/jupyter_server/pull/194)) The bundlerextension entry point was removed. -## [0.2.1] - 2020-1-10 +## \[0.2.1\] - 2020-1-10 ### Added @@ -936,7 +2118,7 @@ This was a broken release and was yanked from PyPI. - `fetch`: an awaitable function that tests makes requests to the server API - `create_notebook`: a function that writes a notebook to a given temporary file path. 
-## [0.2.0] - 2019-12-19 +## \[0.2.0\] - 2019-12-19 ### Added diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index d97a2606e8..1845b54745 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -24,7 +24,7 @@ The development version of the server requires `node `_, -`pandoc `_ and a few other packages. + pip install hatch + hatch run test:test -To install (and activate) a `conda environment`_ named ``server_docs`` -containing all the necessary packages (except pandoc), use:: +The command takes any argument that you can give to ``pytest``, e.g.:: - conda env create -f docs/environment.yml - source activate server_docs # Linux and OS X - activate server_docs # Windows + hatch run test:test -k name_of_method_to_test -.. _conda environment: - https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#creating-an-environment-from-an-environment-yml-file +You can also drop into a shell in the test environment by running:: + + hatch -e test shell + +Building the Docs +================= -If you want to install the necessary packages with ``pip`` instead:: +Install the docs requirements using ``pip``:: - pip install -r docs/doc-requirements.txt + pip install .[doc] Once you have installed the required packages, you can build the docs with:: cd docs make html -After that, the generated HTML files will be available at -``build/html/index.html``. You may view the docs in your browser. +You can also run the tests using ``hatch`` without installing test dependencies +in your local environment. + + pip install hatch + hatch run docs:build -You can automatically check if all hyperlinks are still valid:: +You can also drop into a shell in the docs environment by running:: - make linkcheck + hatch -e docs shell + +After that, the generated HTML files will be available at +``build/html/index.html``. You may view the docs in your browser. Windows users can find ``make.bat`` in the ``docs`` folder. 
diff --git a/COPYING.md b/COPYING.md deleted file mode 100644 index 7cfb970db8..0000000000 --- a/COPYING.md +++ /dev/null @@ -1,60 +0,0 @@ -# Licensing terms - -This project is licensed under the terms of the Modified BSD License -(also known as New or Revised or 3-Clause BSD), as follows: - -- Copyright (c) 2001-2015, IPython Development Team -- Copyright (c) 2015-, Jupyter Development Team - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -Redistributions of source code must retain the above copyright notice, this -list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright notice, this -list of conditions and the following disclaimer in the documentation and/or -other materials provided with the distribution. - -Neither the name of the Jupyter Development Team nor the names of its -contributors may be used to endorse or promote products derived from this -software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -## About the Jupyter Development Team - -The Jupyter Development Team is the set of all contributors to the Jupyter project. 
-This includes all of the Jupyter subprojects. - -The core team that coordinates development on GitHub can be found here: -https://github.com/jupyter/. - -## Our Copyright Policy - -Jupyter uses a shared copyright model. Each contributor maintains copyright -over their contributions to Jupyter. But, it is important to note that these -contributions are typically only changes to the repositories. Thus, the Jupyter -source code, in its entirety is not the copyright of any single person or -institution. Instead, it is the collective copyright of the entire Jupyter -Development Team. If individual contributors want to maintain a record of what -changes/contributions they have specific copyright on, they should indicate -their copyright in the commit message of the change, when they commit the -change to one of the Jupyter repositories. - -With this in mind, the following banner should be used in any source code file -to indicate the copyright and license terms: - - # Copyright (c) Jupyter Development Team. - # Distributed under the terms of the Modified BSD License. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..076177a497 --- /dev/null +++ b/LICENSE @@ -0,0 +1,31 @@ +BSD 3-Clause License + +- Copyright (c) 2001-2015, IPython Development Team +- Copyright (c) 2015-, Jupyter Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 5ec49b2ec5..0000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,32 +0,0 @@ -include COPYING.md -include CONTRIBUTING.rst -include README.md -include RELEASE.md -include CHANGELOG.md -include package.json - -# include everything in package_data -recursive-include jupyter_server * -recursive-include tests * - -# Documentation -graft docs -exclude docs/\#* - -# Examples -graft examples - -# docs subdirs we want to skip -prune docs/build -prune docs/gh-pages -prune docs/dist - -# Patterns to exclude from any directory -global-exclude *~ -global-exclude *.pyc -global-exclude *.pyo -prune .git -prune **/.ipynb_checkpoints -prune **/.pytest_cache -prune **/.coverage -prune **/.pytest_cache diff --git a/README.md b/README.md index 0958182d81..d5e7540b85 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Jupyter Server -[![Build 
Status](https://github.com/jupyter/jupyter_server/workflows/CI/badge.svg?query=branch%3Amain++)](https://github.com/jupyter-server/jupyter_server/actions?query=branch%3Amain++) +[![Build Status](https://github.com/jupyter-server/jupyter_server/actions/workflows/python-tests.yml/badge.svg?query=branch%3Amain++)](https://github.com/jupyter-server/jupyter_server/actions/workflows/python-tests.yml/badge.svg?query=branch%3Amain++) [![Documentation Status](https://readthedocs.org/projects/jupyter-server/badge/?version=latest)](http://jupyter-server.readthedocs.io/en/latest/?badge=latest) The Jupyter Server provides the backend (i.e. the core services, APIs, and REST endpoints) for Jupyter web applications like Jupyter notebook, JupyterLab, and Voila. @@ -12,7 +12,9 @@ For more information, read our [documentation here](http://jupyter-server.readth To install the latest release locally, make sure you have [pip installed](https://pip.readthedocs.io/en/stable/installing/) and run: - pip install jupyter_server +``` +pip install jupyter_server +``` Jupyter Server currently supports Python>=3.6 on Linux, OSX and Windows. @@ -30,7 +32,9 @@ To see the changes between releases, checkout the [CHANGELOG](https://github.com Launch with: - jupyter server +``` +jupyter server +``` ### Testing @@ -44,6 +48,34 @@ If you are interested in contributing to the project, see [`CONTRIBUTING.rst`](C - When: Thursdays [8:00am, Pacific time](https://www.thetimezoneconverter.com/?t=8%3A00%20am&tz=San%20Francisco&) - Where: [Jovyan Zoom](https://zoom.us/my/jovyan?pwd=c0JZTHlNdS9Sek9vdzR3aTJ4SzFTQT09) -- What: [Meeting notes](https://github.com/jupyter-server/team-compass/issues/4) +- What: [Meeting notes](https://github.com/jupyter-server/team-compass/issues/45) See our tentative [roadmap here](https://github.com/jupyter/jupyter_server/issues/127). 
+ +## About the Jupyter Development Team + +The Jupyter Development Team is the set of all contributors to the Jupyter project. +This includes all of the Jupyter subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/jupyter/. + +## Our Copyright Policy + +Jupyter uses a shared copyright model. Each contributor maintains copyright +over their contributions to Jupyter. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the Jupyter +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire Jupyter +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the Jupyter repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + +``` +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +``` diff --git a/RELEASE.md b/RELEASE.md index a785a28701..29c4c54d25 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -2,7 +2,10 @@ ## Using `jupyter_releaser` -The recommended way to make a release is to use [`jupyter_releaser`](https://github.com/jupyter-server/jupyter_releaser#checklist-for-adoption). +The recommended way to make a release is to use [`jupyter_releaser`](https://jupyter-releaser.readthedocs.io/en/latest/get_started/making_release_from_repo.html). + +Note that we must use manual versions since Jupyter Releaser does not +yet support "next" or "patch" when dev versions are used. 
## Manual Release @@ -11,7 +14,7 @@ To create a manual release, perform the following steps: ### Set up ```bash -pip install tbump twine build +pip install hatch twine build git pull origin $(git branch --show-current) git clean -dffx ``` @@ -20,8 +23,9 @@ git clean -dffx ```bash echo "Enter new version" -read script_version -tbump ${script_version} +read new_version +hatch version ${new_version} +git tag -a ${new_version} -m "Release ${new_version}" ``` ### Build the artifacts @@ -36,7 +40,7 @@ python -m build . ```bash echo "Enter dev version" read dev_version -tbump ${dev_version} --no-tag +hatch version ${dev_version} git push origin $(git branch --show-current) ``` diff --git a/codecov.yml b/codecov.yml deleted file mode 100644 index eb9b9dff30..0000000000 --- a/codecov.yml +++ /dev/null @@ -1,9 +0,0 @@ -coverage: - status: - project: - default: - target: auto - threshold: 10 - patch: - default: - target: 0% diff --git a/docs/Makefile b/docs/Makefile index 1765c64af3..f760bf014c 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -53,15 +53,11 @@ clean: rm -rf $(BUILDDIR)/* rm -rf source/config.rst -html: source/config.rst +html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." -source/config.rst: - python3 autogen_config.py - @echo "Created docs for config options" - dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo diff --git a/docs/autogen_config.py b/docs/autogen_config.py deleted file mode 100644 index 7f4ec15db0..0000000000 --- a/docs/autogen_config.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python -import os - -from jupyter_server.serverapp import ServerApp - -header = """\ -.. _other-full-config: - - -Config file and command line options -==================================== - -The Jupyter Server can be run with a variety of command line arguments. -A list of available options can be found below in the :ref:`options section -`. 
- -Defaults for these options can also be set by creating a file named -``jupyter_server_config.py`` in your Jupyter folder. The Jupyter -folder is in your home directory, ``~/.jupyter``. - -To create a ``jupyter_server_config.py`` file, with all the defaults -commented out, you can use the following command line:: - - $ jupyter server --generate-config - - -.. _options: - -Options -------- - -This list of options can be generated by running the following and hitting -enter:: - - $ jupyter server --help-all - -""" -try: - destination = os.path.join(os.path.dirname(__file__), "source/other/full-config.rst") -except BaseException: - destination = os.path.join(os.getcwd(), "full-config.rst") - -with open(destination, "w") as f: - f.write(header) - f.write(ServerApp().document_config_options()) diff --git a/docs/doc-requirements.txt b/docs/doc-requirements.txt deleted file mode 100644 index 4bf44e12ee..0000000000 --- a/docs/doc-requirements.txt +++ /dev/null @@ -1,12 +0,0 @@ -ipykernel -jinja2 -jupyter_client -myst-parser -nbformat -prometheus_client -pydata_sphinx_theme -Send2Trash -sphinxcontrib-openapi -sphinxcontrib_github_alt -sphinxemoji -tornado diff --git a/docs/environment.yml b/docs/environment.yml deleted file mode 100644 index dda583469d..0000000000 --- a/docs/environment.yml +++ /dev/null @@ -1,7 +0,0 @@ -name: jupyter_server_docs -dependencies: - - nodejs - - python - - pip - - pip: - - -r doc-requirements.txt diff --git a/docs/source/api/jupyter_server.auth.rst b/docs/source/api/jupyter_server.auth.rst new file mode 100644 index 0000000000..66c364b44c --- /dev/null +++ b/docs/source/api/jupyter_server.auth.rst @@ -0,0 +1,55 @@ +jupyter\_server.auth package +============================ + +Submodules +---------- + + +.. automodule:: jupyter_server.auth.authorizer + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.auth.decorator + :members: + :undoc-members: + :show-inheritance: + + +.. 
automodule:: jupyter_server.auth.identity + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.auth.login + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.auth.logout + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.auth.security + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.auth.utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.auth + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.base.rst b/docs/source/api/jupyter_server.base.rst new file mode 100644 index 0000000000..0a8e12c6d1 --- /dev/null +++ b/docs/source/api/jupyter_server.base.rst @@ -0,0 +1,37 @@ +jupyter\_server.base package +============================ + +Submodules +---------- + + +.. automodule:: jupyter_server.base.call_context + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.base.handlers + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.base.websocket + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.base.zmqhandlers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.base + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.extension.rst b/docs/source/api/jupyter_server.extension.rst new file mode 100644 index 0000000000..4610c696e6 --- /dev/null +++ b/docs/source/api/jupyter_server.extension.rst @@ -0,0 +1,49 @@ +jupyter\_server.extension package +================================= + +Submodules +---------- + + +.. automodule:: jupyter_server.extension.application + :members: + :undoc-members: + :show-inheritance: + + +.. 
automodule:: jupyter_server.extension.config + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.extension.handler + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.extension.manager + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.extension.serverextension + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.extension.utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.extension + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.files.rst b/docs/source/api/jupyter_server.files.rst new file mode 100644 index 0000000000..892865814a --- /dev/null +++ b/docs/source/api/jupyter_server.files.rst @@ -0,0 +1,19 @@ +jupyter\_server.files package +============================= + +Submodules +---------- + + +.. automodule:: jupyter_server.files.handlers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.files + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.gateway.rst b/docs/source/api/jupyter_server.gateway.rst new file mode 100644 index 0000000000..c08595e672 --- /dev/null +++ b/docs/source/api/jupyter_server.gateway.rst @@ -0,0 +1,37 @@ +jupyter\_server.gateway package +=============================== + +Submodules +---------- + + +.. automodule:: jupyter_server.gateway.connections + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.gateway.gateway_client + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.gateway.handlers + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.gateway.managers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: jupyter_server.gateway + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.i18n.rst b/docs/source/api/jupyter_server.i18n.rst new file mode 100644 index 0000000000..ada5d341bb --- /dev/null +++ b/docs/source/api/jupyter_server.i18n.rst @@ -0,0 +1,10 @@ +jupyter\_server.i18n package +============================ + +Module contents +--------------- + +.. automodule:: jupyter_server.i18n + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.kernelspecs.rst b/docs/source/api/jupyter_server.kernelspecs.rst new file mode 100644 index 0000000000..0c36baf16e --- /dev/null +++ b/docs/source/api/jupyter_server.kernelspecs.rst @@ -0,0 +1,19 @@ +jupyter\_server.kernelspecs package +=================================== + +Submodules +---------- + + +.. automodule:: jupyter_server.kernelspecs.handlers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.kernelspecs + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.nbconvert.rst b/docs/source/api/jupyter_server.nbconvert.rst new file mode 100644 index 0000000000..52d9dc00ad --- /dev/null +++ b/docs/source/api/jupyter_server.nbconvert.rst @@ -0,0 +1,19 @@ +jupyter\_server.nbconvert package +================================= + +Submodules +---------- + + +.. automodule:: jupyter_server.nbconvert.handlers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.nbconvert + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.prometheus.rst b/docs/source/api/jupyter_server.prometheus.rst new file mode 100644 index 0000000000..91a61ed686 --- /dev/null +++ b/docs/source/api/jupyter_server.prometheus.rst @@ -0,0 +1,25 @@ +jupyter\_server.prometheus package +================================== + +Submodules +---------- + + +.. 
automodule:: jupyter_server.prometheus.log_functions + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.prometheus.metrics + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.prometheus + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.rst b/docs/source/api/jupyter_server.rst new file mode 100644 index 0000000000..6402594325 --- /dev/null +++ b/docs/source/api/jupyter_server.rst @@ -0,0 +1,67 @@ +jupyter\_server package +======================= + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + jupyter_server.auth + jupyter_server.base + jupyter_server.extension + jupyter_server.files + jupyter_server.gateway + jupyter_server.i18n + jupyter_server.kernelspecs + jupyter_server.nbconvert + jupyter_server.prometheus + jupyter_server.services + jupyter_server.view + +Submodules +---------- + + +.. automodule:: jupyter_server.config_manager + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.log + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.serverapp + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.traittypes + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.transutils + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: jupyter_server + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.api.rst b/docs/source/api/jupyter_server.services.api.rst new file mode 100644 index 0000000000..e7389bc02d --- /dev/null +++ b/docs/source/api/jupyter_server.services.api.rst @@ -0,0 +1,19 @@ +jupyter\_server.services.api package +==================================== + +Submodules +---------- + + +.. automodule:: jupyter_server.services.api.handlers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.services.api + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.config.rst b/docs/source/api/jupyter_server.services.config.rst new file mode 100644 index 0000000000..7f1c1d1e23 --- /dev/null +++ b/docs/source/api/jupyter_server.services.config.rst @@ -0,0 +1,25 @@ +jupyter\_server.services.config package +======================================= + +Submodules +---------- + + +.. automodule:: jupyter_server.services.config.handlers + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.services.config.manager + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.services.config + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.contents.rst b/docs/source/api/jupyter_server.services.contents.rst new file mode 100644 index 0000000000..bf33989ea1 --- /dev/null +++ b/docs/source/api/jupyter_server.services.contents.rst @@ -0,0 +1,55 @@ +jupyter\_server.services.contents package +========================================= + +Submodules +---------- + + +.. automodule:: jupyter_server.services.contents.checkpoints + :members: + :undoc-members: + :show-inheritance: + + +.. 
automodule:: jupyter_server.services.contents.filecheckpoints + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.services.contents.fileio + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.services.contents.filemanager + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.services.contents.handlers + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.services.contents.largefilemanager + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.services.contents.manager + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.services.contents + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.events.rst b/docs/source/api/jupyter_server.services.events.rst new file mode 100644 index 0000000000..eebb056ac6 --- /dev/null +++ b/docs/source/api/jupyter_server.services.events.rst @@ -0,0 +1,19 @@ +jupyter\_server.services.events package +======================================= + +Submodules +---------- + + +.. automodule:: jupyter_server.services.events.handlers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.services.events + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.kernels.connection.rst b/docs/source/api/jupyter_server.services.kernels.connection.rst new file mode 100644 index 0000000000..076f81c46b --- /dev/null +++ b/docs/source/api/jupyter_server.services.kernels.connection.rst @@ -0,0 +1,31 @@ +jupyter\_server.services.kernels.connection package +=================================================== + +Submodules +---------- + + +.. automodule:: jupyter_server.services.kernels.connection.abc + :members: + :undoc-members: + :show-inheritance: + + +.. 
automodule:: jupyter_server.services.kernels.connection.base + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.services.kernels.connection.channels + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.services.kernels.connection + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.kernels.rst b/docs/source/api/jupyter_server.services.kernels.rst new file mode 100644 index 0000000000..865db764ce --- /dev/null +++ b/docs/source/api/jupyter_server.services.kernels.rst @@ -0,0 +1,39 @@ +jupyter\_server.services.kernels package +======================================== + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + jupyter_server.services.kernels.connection + +Submodules +---------- + + +.. automodule:: jupyter_server.services.kernels.handlers + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.services.kernels.kernelmanager + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.services.kernels.websocket + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.services.kernels + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.kernelspecs.rst b/docs/source/api/jupyter_server.services.kernelspecs.rst new file mode 100644 index 0000000000..3f210d0f55 --- /dev/null +++ b/docs/source/api/jupyter_server.services.kernelspecs.rst @@ -0,0 +1,19 @@ +jupyter\_server.services.kernelspecs package +============================================ + +Submodules +---------- + + +.. automodule:: jupyter_server.services.kernelspecs.handlers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: jupyter_server.services.kernelspecs + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.nbconvert.rst b/docs/source/api/jupyter_server.services.nbconvert.rst new file mode 100644 index 0000000000..cf5f0c037d --- /dev/null +++ b/docs/source/api/jupyter_server.services.nbconvert.rst @@ -0,0 +1,19 @@ +jupyter\_server.services.nbconvert package +========================================== + +Submodules +---------- + + +.. automodule:: jupyter_server.services.nbconvert.handlers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.services.nbconvert + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.rst b/docs/source/api/jupyter_server.services.rst new file mode 100644 index 0000000000..6f221e7357 --- /dev/null +++ b/docs/source/api/jupyter_server.services.rst @@ -0,0 +1,35 @@ +jupyter\_server.services package +================================ + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + jupyter_server.services.api + jupyter_server.services.config + jupyter_server.services.contents + jupyter_server.services.events + jupyter_server.services.kernels + jupyter_server.services.kernelspecs + jupyter_server.services.nbconvert + jupyter_server.services.security + jupyter_server.services.sessions + +Submodules +---------- + + +.. automodule:: jupyter_server.services.shutdown + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: jupyter_server.services + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.security.rst b/docs/source/api/jupyter_server.services.security.rst new file mode 100644 index 0000000000..7d7b2a975c --- /dev/null +++ b/docs/source/api/jupyter_server.services.security.rst @@ -0,0 +1,19 @@ +jupyter\_server.services.security package +========================================= + +Submodules +---------- + + +.. automodule:: jupyter_server.services.security.handlers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.services.security + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.services.sessions.rst b/docs/source/api/jupyter_server.services.sessions.rst new file mode 100644 index 0000000000..292de65c34 --- /dev/null +++ b/docs/source/api/jupyter_server.services.sessions.rst @@ -0,0 +1,25 @@ +jupyter\_server.services.sessions package +========================================= + +Submodules +---------- + + +.. automodule:: jupyter_server.services.sessions.handlers + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: jupyter_server.services.sessions.sessionmanager + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: jupyter_server.services.sessions + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/jupyter_server.view.rst b/docs/source/api/jupyter_server.view.rst new file mode 100644 index 0000000000..f83aff120e --- /dev/null +++ b/docs/source/api/jupyter_server.view.rst @@ -0,0 +1,19 @@ +jupyter\_server.view package +============================ + +Submodules +---------- + + +.. automodule:: jupyter_server.view.handlers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: jupyter_server.view + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/api/modules.rst b/docs/source/api/modules.rst new file mode 100644 index 0000000000..3237ddf087 --- /dev/null +++ b/docs/source/api/modules.rst @@ -0,0 +1,7 @@ +jupyter_server +============== + +.. toctree:: + :maxdepth: 4 + + jupyter_server diff --git a/docs/source/conf.py b/docs/source/conf.py index c78443f012..7f59cb956b 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python3 -# # Jupyter Server documentation build configuration file, created by # sphinx-quickstart on Mon Apr 13 09:51:11 2015. # @@ -16,47 +14,9 @@ import shutil import sys -from pkg_resources import parse_version - HERE = osp.abspath(osp.dirname(__file__)) - - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. - -# DEBUG for RTD -print("DEBUG:: sys.path") -print("================") -for item in sys.path: - print(item) - -# add repo root to sys.path -# here = root/docs/source -here = os.path.abspath(os.path.dirname(__file__)) -repo_root = os.path.dirname(os.path.dirname(here)) -sys.path.insert(0, repo_root) - -print("repo_root") -print("=====================") -print(repo_root) - -# DEBUG for post insert on RTD -print("DEBUG:: Post insert to sys.path") -print("===============================") -for item in sys.path: - print(item) - -# Check if docs are being built by ReadTheDocs -# If so, generate a config.rst file and populate it with documentation about -# configuration options - -if os.environ.get("READTHEDOCS", ""): - - # Readthedocs doesn't run our Makefile, so we do this to force it to generate - # the config docs. 
- with open("../autogen_config.py") as f: - exec(compile(f.read(), "../autogen_config.py", "exec"), {}) +sys.path.insert(0, osp.join(HERE, "..", "")) +from jupyter_server._version import version_info # -- General configuration ------------------------------------------------ @@ -73,12 +33,21 @@ "sphinx.ext.intersphinx", "sphinx.ext.autosummary", "sphinx.ext.mathjax", + "sphinx.ext.napoleon", "IPython.sphinxext.ipython_console_highlighting", "sphinxcontrib_github_alt", "sphinxcontrib.openapi", "sphinxemoji.sphinxemoji", + "sphinx_autodoc_typehints", ] +try: + import enchant # type:ignore[import-not-found] + + extensions += ["sphinxcontrib.spelling"] +except ImportError: + pass + myst_enable_extensions = ["html_image"] # Add any paths that contain templates here, relative to this directory. @@ -107,17 +76,15 @@ # |version| and |release|, also used in various other places throughout the # built documents. # -__version__ = "1.14.0.dev0" # The short X.Y version. -version_parsed = parse_version(__version__) -version = f"{version_parsed.major}.{version_parsed.minor}" +version = f"{version_info[0]}.{version_info[1]}" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -131,7 +98,7 @@ # The reST default role (used for this markup: `text`) to use for all # documents. -# default_role = None +default_role = "literal" # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True @@ -183,7 +150,7 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
-# html_theme_options = {} +html_theme_options = {"navigation_with_keys": False} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] @@ -356,19 +323,63 @@ # texinfo_no_detailmenu = False intersphinx_mapping = { + "python": ("https://docs.python.org/", None), "ipython": ("https://ipython.readthedocs.io/en/stable/", None), - "nbconvert": ("https://nbconvert.readthedocs.io/en/latest/", None), - "nbformat": ("https://nbformat.readthedocs.io/en/latest/", None), - "jupyter": ("https://jupyter.readthedocs.io/en/latest/", None), + "nbconvert": ("https://nbconvert.readthedocs.io/en/stable/", None), + "nbformat": ("https://nbformat.readthedocs.io/en/stable/", None), + "jupyter_core": ("https://jupyter-core.readthedocs.io/en/stable/", None), + "tornado": ("https://www.tornadoweb.org/en/stable/", None), + "traitlets": ("https://traitlets.readthedocs.io/en/stable/", None), } spelling_lang = "en_US" spelling_word_list_filename = "spelling_wordlist.txt" # import before any doc is built, so _ is guaranteed to be injected -import jupyter_server.transutils # noqa: F401 +import jupyter_server.transutils + +CONFIG_HEADER = """\ +.. _other-full-config: + + +Config file and command line options +==================================== + +The Jupyter Server can be run with a variety of command line arguments. +A list of available options can be found below in the :ref:`options section +`. + +Defaults for these options can also be set by creating a file named +``jupyter_server_config.py`` in your Jupyter folder. The Jupyter +folder is in your home directory, ``~/.jupyter``. + +To create a ``jupyter_server_config.py`` file, with all the defaults +commented out, you can use the following command line:: + + $ jupyter server --generate-config + + +.. 
_options: + +Options +------- + +This list of options can be generated by running the following and hitting +enter:: + + $ jupyter server --help-all + +""" def setup(app): dest = osp.join(HERE, "other", "changelog.md") shutil.copy(osp.join(HERE, "..", "..", "CHANGELOG.md"), dest) + + # Generate full-config docs. + from jupyter_server.serverapp import ServerApp + + destination = os.path.join(HERE, "other/full-config.rst") + with open(destination, "w") as f: + f.write(CONFIG_HEADER) + f.write(ServerApp().document_config_options()) diff --git a/docs/source/developers/architecture.rst b/docs/source/developers/architecture.rst new file mode 100644 index 0000000000..0a32a2774c --- /dev/null +++ b/docs/source/developers/architecture.rst @@ -0,0 +1,180 @@ +.. _architecture: + +Architecture Diagrams +===================== + +This page describes the Jupyter Server architecture and the main workflows. +This information is useful for developers who want to understand how Jupyter +Server components are connected and how the principal workflows look like. + +To make changes for these diagrams, use `the Draw.io `_ +open source tool to edit the png file. + + +Jupyter Server Architecture +--------------------------- + +The Jupyter Server system can be seen in the figure below: + +.. image:: ../images/jupyter-server-architecture.drawio.png + :alt: Jupyter Server Architecture + :width: 100% + :align: center + +Jupyter Server contains the following components: + +- **ServerApp** is the main Tornado-based application which connects all + components together. + +- **Config Manager** initializes configuration for the ServerApp. You can define + custom classes for the Jupyter Server managers using this config and change + ServerApp settings. Follow :ref:`the Config File Guide ` to + learn about configuration settings and how to build custom config. + +- **Custom Extensions** allow you to create the custom Server's REST API endpoints. 
+ Follow :ref:`the Extension Guide ` to learn more about extending + ServerApp with extra request handlers. + +- **Gateway Server** is a web server that, when configured, provides access to + Jupyter kernels running on other hosts. There are different ways to create a + gateway server. If your ServerApp needs to communicate with remote kernels + residing within resource-managed clusters, you can use + `Enterprise Gateway `_, + otherwise, you can use + `Kernel Gateway `_, where + kernels run locally to the gateway server. + +- **Contents Manager and File Contents Manager** are responsible for serving + Notebooks on the file system. Session Manager uses Contents Manager to receive + the kernel path. Follow :ref:`the Contents API guide ` to learn + about Contents Manager. + +- **Session Manager** processes users' Sessions. When a user starts a new kernel, + Session Manager starts a process to provision a kernel for the user and generates + a new Session ID. Each opened Notebook has a separate Session, but different + Notebook kernels can use the same Session. That is useful if the user wants to + share data across various opened Notebooks. Session Manager uses an SQLite3 + database to store the Session information. The database is stored in memory by + default, but can be configured to save to disk. + +- **Mapping Kernel Manager** is responsible for managing the lifecycles of the + kernels running within the ServerApp. It starts a new kernel for a user's Session + and facilitates interrupt, restart, and shutdown operations against the kernel. + +- **Jupyter Client** library is used by Jupyter Server to work with the Notebook + kernels. + + - **Kernel Manager** manages a single kernel for the Notebook. To learn more about + Kernel Manager, follow + `the Jupyter Client APIs documentation `_. + + - **Kernel Spec Manager** parses files with the JSON specification for a kernel, + and provides a list of available kernel configurations. 
To learn about + Kernel Spec Manager, check `the Jupyter Client guide `_. + +Create Session Workflow +----------------------- + +The create Session workflow can be seen in the figure below: + +.. image:: ../images/session-create.drawio.png + :alt: Create Session Workflow + :width: 90% + :align: center + +When a user starts a new kernel, the following steps occur: + +#. The Notebook client sends |create_session|_ request to Jupyter Server. This + request has all necessary data, such as Notebook name, type, path, and kernel + name. + +#. **Session Manager** asks **Contents Manager** for the kernel file system path + based on the input data. + +#. **Session Manager** sends kernel path to **Mapping Kernel Manager**. + +#. **Mapping Kernel Manager** starts the kernel create process by using + **Multi Kernel Manager** and **Kernel Manager**. You can learn more about + **Multi Kernel Manager** in + `the Jupyter Client APIs `_. + +#. **Kernel Manager** uses the provisioner layer to launch a new kernel. + +#. **Kernel Provisioner** is responsible for launching kernels based on the + kernel specification. If the kernel specification doesn't define a provisioner, + it uses `Local Provisioner `_ + to launch the kernel. You can use + `Kernel Provisioner Base `_ + and + `Kernel Provisioner Factory `_ + to create custom provisioners. + +#. **Kernel Spec Manager** gets the kernel specification from the JSON file. + The specification is located in ``kernel.json`` file. + +#. Once **Kernel Provisioner** launches the kernel, + **Kernel Manager** generates the new kernel ID for **Session Manager**. + +#. **Session Manager** saves the new Session data to the SQLite3 database + (Session ID, Notebook path, Notebook name, Notebook type, and kernel ID). + +#. Notebook client receives the created Session data. + +.. 
_create_session: https://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyter_server/master/jupyter_server/services/api/api.yaml#/sessions/post_api_sessions + +.. |create_session| replace:: the *POST /api/sessions* + +Delete Session Workflow +----------------------- + +The delete Session workflow can be seen in the figure below: + +.. image:: ../images/session-delete.drawio.png + :alt: Delete Session Workflow + :width: 80% + :align: center + +When a user stops a kernel, the following steps occur: + +#. The Notebook client sends the |delete_session|_ request to Jupyter Server. This + request has the Session ID that the kernel is currently using. + +#. **Session Manager** gets the Session data from the SQLite3 database and sends + the kernel ID to **Mapping Kernel Manager**. + +#. **Mapping Kernel Manager** starts the kernel shutdown process by using + **Multi Kernel Manager** and **Kernel Manager**. + +#. **Kernel Manager** determines the mode of interrupt from the + **Kernel Spec Manager**. It supports ``Signal`` and ``Message`` + interrupt modes. By default, the ``Signal`` interrupt mode is used. + + - When the interrupt mode is ``Signal``, the **Kernel Provisioner** + interrupts the kernel with the ``SIGINT`` operating system signal + (although other provisioner implementations may use a different approach). + + - When the interrupt mode is ``Message``, Session sends + the `"interrupt_request" `_ + message on the control channel. + +#. After interrupting the kernel, Session sends the `"shutdown_request" `_ + message on the control channel. + +#. **Kernel Manager** waits for the kernel shutdown. After the timeout, and if + it detects the kernel process is still running, the **Kernel Manager** + terminates the kernel by sending a ``SIGTERM`` operating system signal + (or provisioner equivalent). 
If it finds the kernel process has + not terminated, the **Kernel Manager** will follow up with a ``SIGKILL`` + operating system signal (or provisioner equivalent) to ensure the kernel's + termination. + +#. **Kernel Manager** cleans up the kernel resources. It removes kernel's interprocess + communication ports, closes control socket, and releases Shell, IOPub, StdIn, + Control, and Heartbeat ports. + +#. When shutdown is finished, **Session Manager** deletes the Session data from + the SQLite3 database and responses 204 status code to the Notebook client. + +.. _delete_session: https://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyter_server/master/jupyter_server/services/api/api.yaml#/sessions/delete_api_sessions__session_ + +.. |delete_session| replace:: the *DELETE /api/sessions/{session_id}* diff --git a/docs/source/developers/contents.rst b/docs/source/developers/contents.rst index 28d2a33334..6910535f30 100644 --- a/docs/source/developers/contents.rst +++ b/docs/source/developers/contents.rst @@ -33,36 +33,48 @@ which we refer to as **models**. Models may contain the following entries: -+--------------------+-----------+------------------------------+ -| Key | Type |Info | -+====================+===========+==============================+ -|**name** |unicode |Basename of the entity. | -+--------------------+-----------+------------------------------+ -|**path** |unicode |Full | -| | |(:ref:`API-style`) | -| | |path to the entity. | -+--------------------+-----------+------------------------------+ -|**type** |unicode |The entity type. One of | -| | |``"notebook"``, ``"file"`` or | -| | |``"directory"``. | -+--------------------+-----------+------------------------------+ -|**created** |datetime |Creation date of the entity. | -+--------------------+-----------+------------------------------+ -|**last_modified** |datetime |Last modified date of the | -| | |entity. 
| -+--------------------+-----------+------------------------------+ -|**content** |variable |The "content" of the entity. | -| | |(:ref:`See | -| | |Below`) | -+--------------------+-----------+------------------------------+ -|**mimetype** |unicode or |The mimetype of ``content``, | -| |``None`` |if any. (:ref:`See | -| | |Below`) | -+--------------------+-----------+------------------------------+ -|**format** |unicode or |The format of ``content``, | -| |``None`` |if any. (:ref:`See | -| | |Below`) | -+--------------------+-----------+------------------------------+ ++--------------------+------------+-------------------------------+ +| Key | Type | Info | ++====================+============+===============================+ +| **name** | unicode | Basename of the entity. | ++--------------------+------------+-------------------------------+ +| **path** | unicode | Full | +| | | (:ref:`API-style`) | +| | | path to the entity. | ++--------------------+------------+-------------------------------+ +| **type** | unicode | The entity type. One of | +| | | ``"notebook"``, ``"file"`` or | +| | | ``"directory"``. | ++--------------------+------------+-------------------------------+ +| **created** | datetime | Creation date of the entity. | ++--------------------+------------+-------------------------------+ +| **last_modified** | datetime | Last modified date of the | +| | | entity. | ++--------------------+------------+-------------------------------+ +| **content** | variable | The "content" of the entity. | +| | | (:ref:`See | +| | | Below`) | ++--------------------+------------+-------------------------------+ +| **mimetype** | unicode or | The mimetype of ``content``, | +| | ``None`` | if any. (:ref:`See | +| | | Below`) | ++--------------------+------------+-------------------------------+ +| **format** | unicode or | The format of ``content``, | +| | ``None`` | if any. 
(:ref:`See | +| | | Below`) | ++--------------------+------------+-------------------------------+ +| [optional] | | | +| **hash** | unicode or | The hash of the contents. | +| | ``None`` | It cannot be null if | +| | | ``hash_algorithm`` is | +| | | defined. | ++--------------------+------------+-------------------------------+ +| [optional] | | | +| **hash_algorithm** | unicode or | The algorithm used to compute | +| | ``None`` | hash value. | +| | | It cannot be null | +| | | if ``hash`` is defined. | ++--------------------+------------+-------------------------------+ .. _modelcontent: @@ -76,6 +88,9 @@ model. There are three model types: **notebook**, **file**, and **directory**. :class:`nbformat.notebooknode.NotebookNode` representing the .ipynb file represented by the model. See the `NBFormat`_ documentation for a full description. + - The ``hash`` field a hexdigest string of the hash value of the file. + If ``ContentManager.get`` not support hash, it should always be ``None``. + - ``hash_algorithm`` is the algorithm used to compute the hash value. - ``file`` models - The ``format`` field is either ``"text"`` or ``"base64"``. @@ -85,12 +100,16 @@ model. There are three model types: **notebook**, **file**, and **directory**. file models, ``content`` simply contains the file's bytes after decoding as UTF-8. Non-text (``base64``) files are read as bytes, base64 encoded, and then decoded as UTF-8. + - The ``hash`` field a hexdigest string of the hash value of the file. + If ``ContentManager.get`` not support hash, it should always be ``None``. + - ``hash_algorithm`` is the algorithm used to compute the hash value. - ``directory`` models - The ``format`` field is always ``"json"``. - The ``mimetype`` field is always ``None``. - The ``content`` field contains a list of :ref:`content-free` models representing the entities in the directory. + - The ``hash`` field is always ``None``. .. note:: @@ -107,41 +126,43 @@ model. 
There are three model types: **notebook**, **file**, and **directory**. .. code-block:: python - # Notebook Model with Content + # Notebook Model with Content and Hash { - 'content': { - 'metadata': {}, - 'nbformat': 4, - 'nbformat_minor': 0, - 'cells': [ + "content": { + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 0, + "cells": [ { - 'cell_type': 'markdown', - 'metadata': {}, - 'source': 'Some **Markdown**', + "cell_type": "markdown", + "metadata": {}, + "source": "Some **Markdown**", }, ], }, - 'created': datetime(2015, 7, 25, 19, 50, 19, 19865), - 'format': 'json', - 'last_modified': datetime(2015, 7, 25, 19, 50, 19, 19865), - 'mimetype': None, - 'name': 'a.ipynb', - 'path': 'foo/a.ipynb', - 'type': 'notebook', - 'writable': True, + "created": datetime(2015, 7, 25, 19, 50, 19, 19865), + "format": "json", + "last_modified": datetime(2015, 7, 25, 19, 50, 19, 19865), + "mimetype": None, + "name": "a.ipynb", + "path": "foo/a.ipynb", + "type": "notebook", + "writable": True, + "hash": "f5e43a0b1c2e7836ab3b4d6b1c35c19e2558688de15a6a14e137a59e4715d34b", + "hash_algorithm": "sha256", } # Notebook Model without Content { - 'content': None, - 'created': datetime.datetime(2015, 7, 25, 20, 17, 33, 271931), - 'format': None, - 'last_modified': datetime.datetime(2015, 7, 25, 20, 17, 33, 271931), - 'mimetype': None, - 'name': 'a.ipynb', - 'path': 'foo/a.ipynb', - 'type': 'notebook', - 'writable': True + "content": None, + "created": datetime.datetime(2015, 7, 25, 20, 17, 33, 271931), + "format": None, + "last_modified": datetime.datetime(2015, 7, 25, 20, 17, 33, 271931), + "mimetype": None, + "name": "a.ipynb", + "path": "foo/a.ipynb", + "type": "notebook", + "writable": True, } @@ -227,21 +248,28 @@ return for a more complete implementation. 
class NoOpCheckpoints(GenericCheckpointsMixin, Checkpoints): """requires the following methods:""" + def create_file_checkpoint(self, content, format, path): - """ -> checkpoint model""" + """-> checkpoint model""" + def create_notebook_checkpoint(self, nb, path): - """ -> checkpoint model""" + """-> checkpoint model""" + def get_file_checkpoint(self, checkpoint_id, path): - """ -> {'type': 'file', 'content': , 'format': {'text', 'base64'}}""" + """-> {'type': 'file', 'content': , 'format': {'text', 'base64'}}""" + def get_notebook_checkpoint(self, checkpoint_id, path): - """ -> {'type': 'notebook', 'content': }""" + """-> {'type': 'notebook', 'content': }""" + def delete_checkpoint(self, checkpoint_id, path): """deletes a checkpoint for a file""" + def list_checkpoints(self, path): """returns a list of checkpoint models for a given file, default just does one per file """ return [] + def rename_checkpoint(self, checkpoint_id, old_path, new_path): """renames checkpoint from old path to new path""" @@ -262,7 +290,7 @@ ContentsManager. PGContents_ is an example of a complete implementation of a custom ``ContentsManager``. It stores notebooks and files in PostgreSQL_ and encodes directories as SQL relations. PGContents also provides an example of how to - re-use the notebook's tests. + reuse the notebook's tests. .. _NBFormat: https://nbformat.readthedocs.io/en/latest/index.html .. _PGContents: https://github.com/quantopian/pgcontents diff --git a/docs/source/developers/extensions.rst b/docs/source/developers/extensions.rst index b54b919cc4..5c27d25747 100644 --- a/docs/source/developers/extensions.rst +++ b/docs/source/developers/extensions.rst @@ -1,3 +1,5 @@ +.. 
_extensions: + ================= Server Extensions ================= @@ -34,8 +36,8 @@ The easiest way to add endpoints and handle incoming requests is to subclass the from jupyter_server.base.handlers import JupyterHandler import tornado - class MyExtensionHandler(JupyterHandler): + class MyExtensionHandler(JupyterHandler): @tornado.web.authenticated def get(self): ... @@ -55,10 +57,8 @@ Then add this handler to Jupyter Server's Web Application through the ``_load_ju """ This function is called when the extension is loaded. """ - handlers = [ - ('/myextension/hello', MyExtensionHandler) - ] - serverapp.web_app.add_handlers('.*$', handlers) + handlers = [("/myextension/hello", MyExtensionHandler)] + serverapp.web_app.add_handlers(".*$", handlers) Making an extension discoverable @@ -77,19 +77,13 @@ Usually, this requires a ``module`` key with the import path to the extension's Returns a list of dictionaries with metadata describing where to find the `_load_jupyter_server_extension` function. """ - return [ - { - "module": "my_extension" - } - ] + return [{"module": "my_extension"}] Second, add the extension to the ServerApp's ``jpserver_extensions`` trait. This can be manually added by users in their ``jupyter_server_config.py`` file, .. code-block:: python - c.ServerApp.jpserver_extensions = { - "my_extension": True - } + c.ServerApp.jpserver_extensions = {"my_extension": True} or loaded from a JSON file in the ``jupyter_server_config.d`` directory under one of `Jupyter's paths`_. (See the `Distributing a server extension`_ section @@ -98,13 +92,7 @@ it.) .. 
code-block:: python - { - "ServerApp": { - "jpserver_extensions": { - "my_extension": true - } - } - } + {"ServerApp": {"jpserver_extensions": {"my_extension": true}}} Authoring a configurable extension application @@ -134,7 +122,6 @@ The basic structure of an ExtensionApp is shown below: class MyExtensionApp(ExtensionApp): - # -------------- Required traits -------------- name = "myextension" default_url = "/myextension" @@ -154,7 +141,7 @@ The basic structure of an ExtensionApp is shown below: ... # Update the self.settings trait to pass extra # settings to the underlying Tornado Web Application. - self.settings.update({'':...}) + self.settings.update({"": ...}) def initialize_handlers(self): ... @@ -209,7 +196,6 @@ Jupyter Server provides a convenient mixin class for adding these properties to class MyExtensionHandler(ExtensionHandlerMixin, JupyterHandler): - @tornado.web.authenticated def get(self): ... @@ -249,16 +235,14 @@ templates from the Jinja templating environment created by the ``ExtensionApp``. from jupyter_server.base.handlers import JupyterHandler from jupyter_server.extension.handler import ( ExtensionHandlerMixin, - ExtensionHandlerJinjaMixin + ExtensionHandlerJinjaMixin, ) import tornado + class MyExtensionHandler( - ExtensionHandlerMixin, - ExtensionHandlerJinjaMixin, - JupyterHandler + ExtensionHandlerMixin, ExtensionHandlerJinjaMixin, JupyterHandler ): - @tornado.web.authenticated def get(self): ... @@ -286,12 +270,7 @@ To make an ``ExtensionApp`` discoverable by Jupyter Server, add the ``app`` key+ Returns a list of dictionaries with metadata describing where to find the `_load_jupyter_server_extension` function. """ - return [ - { - "module": "myextension", - "app": MyExtensionApp - } - ] + return [{"module": "myextension", "app": MyExtensionApp}] Launching an ``ExtensionApp`` @@ -313,13 +292,11 @@ To make your extension executable from anywhere on your system, point an entry-p setup( - name='myfrontend', - ... + name="myfrontend", + # ... 
entry_points={ - 'console_scripts': [ - 'jupyter-myextension = myextension:launch_instance' - ] - } + "console_scripts": ["jupyter-myextension = myextension:launch_instance"] + }, ) ``ExtensionApp`` as a classic Notebook server extension @@ -351,13 +328,9 @@ Putting it all together, authors can distribute their extension following this s # Found in the __init__.py of package + def _jupyter_server_extension_points(): - return [ - { - "module": "myextension.app", - "app": MyExtensionApp - } - ] + return [{"module": "myextension.app", "app": MyExtensionApp}] 2. Create an extension by writing a ``_load_jupyter_server_extension()`` function or subclassing ``ExtensionApp``. This is where the extension logic will live (i.e. custom extension handlers, config, etc). See the sections above for more information on how to create an extension. @@ -397,7 +370,7 @@ Putting it all together, authors can distribute their extension following this s .. code-block:: console - jupyter server disable myextension + jupyter server extension disable myextension which will change the boolean value in the JSON file above. @@ -410,15 +383,14 @@ Putting it all together, authors can distribute their extension following this s setup( name="myextension", - ... + # ... include_package_data=True, data_files=[ ( "etc/jupyter/jupyter_server_config.d", - ["jupyter-config/jupyter_server_config.d/myextension.json"] + ["jupyter-config/jupyter_server_config.d/myextension.json"], ), - ] - + ], ) @@ -450,6 +422,7 @@ There are a few key steps to make this happen: def load_jupyter_server_extension(nb_server_app): ... + # Reference the old function name with the new function name. _load_jupyter_server_extension = load_jupyter_server_extension @@ -492,19 +465,18 @@ There are a few key steps to make this happen: setup( name="myextension", - ... + # ... 
include_package_data=True, data_files=[ ( "etc/jupyter/jupyter_server_config.d", - ["jupyter-config/jupyter_server_config.d/myextension.json"] + ["jupyter-config/jupyter_server_config.d/myextension.json"], ), ( "etc/jupyter/jupyter_notebook_config.d", - ["jupyter-config/jupyter_notebook_config.d/myextension.json"] + ["jupyter-config/jupyter_notebook_config.d/myextension.json"], ), - ] - + ], ) 3. (Optional) Point extension at the new favicon location. @@ -518,14 +490,13 @@ There are a few key steps to make this happen: .. code-block:: python def load_jupyter_server_extension(nb_server_app): - web_app = nb_server_app.web_app - host_pattern = '.*$' - base_url = web_app.settings['base_url'] + host_pattern = ".*$" + base_url = web_app.settings["base_url"] # Add custom extensions handler. custom_handlers = [ - ... + # ... ] # Favicon redirects. @@ -533,49 +504,74 @@ There are a few key steps to make this happen: ( url_path_join(base_url, "/static/favicons/favicon.ico"), RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon.ico") + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon.ico" + ) + }, ), ( url_path_join(base_url, "/static/favicons/favicon-busy-1.ico"), RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-busy-1.ico")} + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-busy-1.ico" + ) + }, ), ( url_path_join(base_url, "/static/favicons/favicon-busy-2.ico"), RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-busy-2.ico")} + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-busy-2.ico" + ) + }, ), ( url_path_join(base_url, "/static/favicons/favicon-busy-3.ico"), RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-busy-3.ico")} + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-busy-3.ico" + ) + }, ), ( 
url_path_join(base_url, "/static/favicons/favicon-file.ico"), RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-file.ico")} + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-file.ico" + ) + }, ), ( url_path_join(base_url, "/static/favicons/favicon-notebook.ico"), RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-notebook.ico")} + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-notebook.ico" + ) + }, ), ( url_path_join(base_url, "/static/favicons/favicon-terminal.ico"), RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/favicon-terminal.ico")} + { + "url": url_path_join( + serverapp.base_url, "static/base/images/favicon-terminal.ico" + ) + }, ), ( url_path_join(base_url, "/static/logo/logo.png"), RedirectHandler, - {"url": url_path_join(serverapp.base_url, "static/base/images/logo.png")} + {"url": url_path_join(serverapp.base_url, "static/base/images/logo.png")}, ), ] - web_app.add_handlers( - host_pattern, - custom_handlers + favicon_redirects - ) + web_app.add_handlers(host_pattern, custom_handlers + favicon_redirects) -.. _`classic Notebook Server`: https://jupyter-notebook.readthedocs.io/en/stable/extending/handlers.html +.. 
_`classic Notebook Server`: https://jupyter-notebook.readthedocs.io/en/v6.5.4/extending/handlers.html diff --git a/docs/source/developers/index.rst b/docs/source/developers/index.rst index b8f5140dbb..fb40f24797 100644 --- a/docs/source/developers/index.rst +++ b/docs/source/developers/index.rst @@ -8,9 +8,11 @@ These pages target people writing Jupyter Web applications and server extensions :maxdepth: 1 :name: developers + architecture dependency rest-api extensions savehooks contents websocket-protocols + API Docs <../api/modules> diff --git a/docs/source/developers/savehooks.rst b/docs/source/developers/savehooks.rst index f2998938ae..4a466ef830 100644 --- a/docs/source/developers/savehooks.rst +++ b/docs/source/developers/savehooks.rst @@ -51,6 +51,7 @@ A post-save hook to make a script equivalent whenever the notebook is saved _script_exporter = None + def script_post_save(model, os_path, contents_manager, **kwargs): """convert notebooks to Python script after save with nbconvert @@ -58,7 +59,7 @@ A post-save hook to make a script equivalent whenever the notebook is saved """ from nbconvert.exporters.script import ScriptExporter - if model['type'] != 'notebook': + if model["type"] != "notebook": return global _script_exporter @@ -69,14 +70,15 @@ A post-save hook to make a script equivalent whenever the notebook is saved log = contents_manager.log base, ext = os.path.splitext(os_path) - py_fname = base + '.py' + py_fname = base + ".py" script, resources = _script_exporter.from_filename(os_path) - script_fname = base + resources.get('output_extension', '.txt') + script_fname = base + resources.get("output_extension", ".txt") log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir)) - with io.open(script_fname, 'w', encoding='utf-8') as f: + with io.open(script_fname, "w", encoding="utf-8") as f: f.write(script) + c.FileContentsManager.post_save_hook = script_post_save diff --git a/docs/source/developers/websocket-protocols.rst 
b/docs/source/developers/websocket-protocols.rst index aeb5b44896..72017e86d7 100644 --- a/docs/source/developers/websocket-protocols.rst +++ b/docs/source/developers/websocket-protocols.rst @@ -81,11 +81,11 @@ The message can be deserialized by parsing ``msg`` as a JSON object (after decod .. code-block:: python msg = { - 'channel': channel, - 'header': header, - 'parent_header': parent_header, - 'metadata': metadata, - 'content': content + "channel": channel, + "header": header, + "parent_header": parent_header, + "metadata": metadata, + "content": content, } Then retrieving the channel name, and updating with the buffers, if any: @@ -147,7 +147,8 @@ Where: .. code-block:: python import json - channel = bin_msg[offset_0:offset_1].decode('utf-8') + + channel = bin_msg[offset_0:offset_1].decode("utf-8") header = json.loads(bin_msg[offset_1:offset_2]) parent_header = json.loads(bin_msg[offset_2:offset_3]) metadata = json.loads(bin_msg[offset_3:offset_4]) diff --git a/docs/source/images/jupyter-server-architecture.drawio.png b/docs/source/images/jupyter-server-architecture.drawio.png new file mode 100644 index 0000000000..385a35bd1d Binary files /dev/null and b/docs/source/images/jupyter-server-architecture.drawio.png differ diff --git a/docs/source/images/session-create.drawio.png b/docs/source/images/session-create.drawio.png new file mode 100644 index 0000000000..a344095ed6 Binary files /dev/null and b/docs/source/images/session-create.drawio.png differ diff --git a/docs/source/images/session-delete.drawio.png b/docs/source/images/session-delete.drawio.png new file mode 100644 index 0000000000..d2093fb1f3 Binary files /dev/null and b/docs/source/images/session-delete.drawio.png differ diff --git a/docs/source/index.rst b/docs/source/index.rst index 11922b9ed4..a3f6abb517 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -5,7 +5,7 @@ You've landed on the documentation pages for the **Jupyter Server** Project. 
Som * `Jupyter Server Github Repo `_, the source code we describe in this code. * `Jupyter Notebook Github Repo `_ , the source code for the classic Notebook. -* `JupyterLab Github Repo `_, the JupyterLab server wich runs on the Jupyter Server. +* `JupyterLab Github Repo `_, the JupyterLab server which runs on the Jupyter Server. Introduction diff --git a/docs/source/operators/configuring-logging.rst b/docs/source/operators/configuring-logging.rst new file mode 100644 index 0000000000..1bb382fa73 --- /dev/null +++ b/docs/source/operators/configuring-logging.rst @@ -0,0 +1,143 @@ +.. _configurable_logging: + +Configuring Logging +=================== + +Jupyter Server (and Jupyter Server extension applications such as Jupyter Lab) +are Traitlets applications. + +By default Traitlets applications log to stderr. You can configure them to log +to other locations e.g. log files. + +Logging is configured via the ``logging_config`` "trait" which accepts a +:py:func:`logging.config.dictConfig` object. For more information look for +``Application.logging_config`` in :ref:`other-full-config`. + + +Examples +-------- + +.. _configurable_logging.jupyter_server: + +Jupyter Server +^^^^^^^^^^^^^^ + +A minimal example which logs Jupyter Server output to a file: + +.. code-block:: python + + c.ServerApp.logging_config = { + "version": 1, + "handlers": { + "logfile": { + "class": "logging.FileHandler", + "level": "DEBUG", + "filename": "jupyter_server.log", + }, + }, + "loggers": { + "ServerApp": { + "level": "DEBUG", + "handlers": ["console", "logfile"], + }, + }, + } + +.. note:: + + To keep the default behaviour of logging to stderr ensure the ``console`` + handler (provided by Traitlets) is included in the list of handlers. + +.. warning:: + + Be aware that the ``ServerApp`` log may contain security tokens. If + redirecting to log files ensure they have appropriate permissions. + + +.. _configurable_logging.extension_applications: + +Jupyter Server Extension Applications (e.g. 
Jupyter Lab) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +An example which logs both Jupyter Server and Jupyter Lab output to a file: + +.. note:: + + Because Jupyter Server and its extension applications are separate Traitlets + applications their logging must be configured separately. + +.. code-block:: python + + c.ServerApp.logging_config = { + "version": 1, + "handlers": { + "logfile": { + "class": "logging.FileHandler", + "level": "DEBUG", + "filename": "jupyter_server.log", + "formatter": "my_format", + }, + }, + "formatters": { + "my_format": { + "format": "%(asctime)s %(levelname)-8s %(name)-15s %(message)s", + "datefmt": "%Y-%m-%d %H:%M:%S", + }, + }, + "loggers": { + "ServerApp": { + "level": "DEBUG", + "handlers": ["console", "logfile"], + }, + }, + } + + c.LabApp.logging_config = { + "version": 1, + "handlers": { + "logfile": { + "class": "logging.FileHandler", + "level": "DEBUG", + "filename": "jupyter_server.log", + "formatter": "my_format", + }, + }, + "formatters": { + "my_format": { + "format": "%(asctime)s %(levelname)-8s %(name)-15s %(message)s", + "datefmt": "%Y-%m-%d %H:%M:%S", + }, + }, + "loggers": { + "LabApp": { + "level": "DEBUG", + "handlers": ["console", "logfile"], + }, + }, + } + +.. note:: + + The configured application name should match the logger name + e.g. ``c.LabApp.logging_config`` defines a logger called ``LabApp``. + +.. tip:: + + This diff modifies the example to log Jupyter Server and Jupyter Lab output + to different files: + + .. 
code-block:: diff + + --- before + +++ after + c.LabApp.logging_config = { + 'version': 1, + 'handlers': { + 'logfile': { + 'class': 'logging.FileHandler', + 'level': 'DEBUG', + - 'filename': 'jupyter_server.log', + + 'filename': 'jupyter_lab.log', + 'formatter': 'my_format', + }, + }, diff --git a/docs/source/operators/index.rst b/docs/source/operators/index.rst index 41354bce73..846aa2cd05 100644 --- a/docs/source/operators/index.rst +++ b/docs/source/operators/index.rst @@ -13,3 +13,4 @@ These pages are targeted at people using, configuring, and/or deploying multiple migrate-from-nbserver public-server security + configuring-logging diff --git a/docs/source/operators/multiple-extensions.rst b/docs/source/operators/multiple-extensions.rst index 192f221ac3..d4241bb729 100644 --- a/docs/source/operators/multiple-extensions.rst +++ b/docs/source/operators/multiple-extensions.rst @@ -7,7 +7,7 @@ Managing multiple extensions One of the major benefits of Jupyter Server is that you can run serve multiple Jupyter frontend applications above the same Tornado web server. That's because every Jupyter frontend application is now a server extension. -When you run a Jupyter Server will multiple extensions enabled, each extension +When you run a Jupyter Server with multiple extensions enabled, each extension appends its own set of handlers and static assets to the server. Listing extensions diff --git a/docs/source/operators/public-server.rst b/docs/source/operators/public-server.rst index d21244ad91..dc534ca832 100644 --- a/docs/source/operators/public-server.rst +++ b/docs/source/operators/public-server.rst @@ -10,8 +10,8 @@ serving HTTP requests. .. note:: By default, Jupyter Server runs locally at 127.0.0.1:8888 - and is accessible only from `localhost`. You may access the - server from the browser using `http://127.0.0.1:8888`. + and is accessible only from ``localhost``. You may access the + server from the browser using ``http://127.0.0.1:8888``. 
This document describes how you can :ref:`secure a Jupyter server ` and how to @@ -31,13 +31,13 @@ This document describes how you can To use JupyterHub, you need a Unix server (typically Linux) running somewhere that is accessible to your users on a network. This may run over the public internet, but doing so introduces additional - `security concerns `_. + `security concerns `_. .. _ZeroMQ: https://zeromq.org/ -.. _Tornado: with Found to http://www.tornadoweb.org/en/stable/ +.. _Tornado: http://www.tornadoweb.org/en/stable/ .. _JupyterHub: https://jupyterhub.readthedocs.io/en/latest/ @@ -78,8 +78,8 @@ Automatic Password setup As of notebook 5.3, the first time you log-in using a token, the server should give you the opportunity to setup a password from the user interface. -You will be presented with a form asking for the current _token_, as well as -your _new_ _password_ ; enter both and click on ``Login and setup new password``. +You will be presented with a form asking for the current *token*, as well as +your *new password*; enter both and click on ``Login and setup new password``. Next time you need to log in you'll be able to use the new password instead of the login token, otherwise follow the procedure to set a password from the @@ -111,19 +111,19 @@ Preparing a hashed password ~~~~~~~~~~~~~~~~~~~~~~~~~~~ You can prepare a hashed password manually, using the function -:func:`notebook.auth.security.passwd`: +:func:`jupyter_server.auth.passwd`: -.. code-block:: python +.. code-block:: pycon >>> from jupyter_server.auth import passwd >>> passwd() - ... Enter password: - ... Verify password: + Enter password: + Verify password: 'sha1:67c9e60bb8b6:9ffede0825894254b2e042ea597d771089e11aed' .. caution:: - :func:`~notebook.auth.security.passwd` when called with no arguments + :func:`~jupyter_server.auth.passwd` when called with no arguments will prompt you to enter and verify your password such as in the above code snippet. 
Although the function can also be passed a string as an argument such as ``passwd('mypassword')``, please @@ -180,7 +180,7 @@ compliant self-signed certificate that will not raise warnings, it is possible certificate and follow the steps in :ref:`using-lets-encrypt` to set up a public server. -.. _OWASP: https://www.owasp.org/index.php/Main_Page +.. _OWASP: https://owasp.org/sitemap/ .. _tutorial: https://arstechnica.com/information-technology/2009/12/how-to-get-set-with-a-secure-sertificate-for-free/ .. _jupyter_public_server: @@ -308,7 +308,7 @@ instructions about modifying ``jupyter_server_config.py``): .. code-block:: python - c.ServerApp.base_url = '/ipython/' + c.ServerApp.base_url = "/ipython/" Embedding the notebook in another website ----------------------------------------- @@ -316,31 +316,18 @@ Embedding the notebook in another website Sometimes you may want to embed the notebook somewhere on your website, e.g. in an IFrame. To do this, you may need to override the Content-Security-Policy to allow embedding. Assuming your website is at -`https://mywebsite.example.com`, you can embed the notebook on your website +``https://mywebsite.example.com``, you can embed the notebook on your website with the following configuration setting in :file:`jupyter_server_config.py`: .. code-block:: python c.ServerApp.tornado_settings = { - 'headers': { - 'Content-Security-Policy': "frame-ancestors https://mywebsite.example.com 'self' " + "headers": { + "Content-Security-Policy": "frame-ancestors https://mywebsite.example.com 'self' " } } -When embedding the notebook in a website using an iframe, -consider putting the notebook in single-tab mode. -Since the notebook opens some links in new tabs by default, -single-tab mode keeps the notebook from opening additional tabs. -Adding the following to :file:`~/.jupyter/custom/custom.js` will enable -single-tab mode: - -.. 
code-block:: javascript - - define(['base/js/namespace'], function(Jupyter){ - Jupyter._target = '_self'; - }); - Using a gateway server for kernel management -------------------------------------------- @@ -364,7 +351,7 @@ or in :file:`jupyter_notebook_config.py`: .. code-block:: python - c.GatewayClient.url = http://my-gateway-server:8888 + c.GatewayClient.url = "http://my-gateway-server:8888" When provided, all kernel specifications will be retrieved from the specified Gateway server and all kernels will be managed by that server. This option enables the ability to target kernel processes @@ -430,7 +417,7 @@ Using ``jupyter server`` as a kernels repeatedly crashing, likely due to a lack of `PID reaping `_. To avoid this, use the `tini `_ ``init`` as your -Dockerfile `ENTRYPOINT`:: +Dockerfile ``ENTRYPOINT``:: # Add Tini. Tini operates as a process subreaper for jupyter. This prevents # kernel crashes. diff --git a/docs/source/operators/security.rst b/docs/source/operators/security.rst index 87148cbe41..2a63db7087 100644 --- a/docs/source/operators/security.rst +++ b/docs/source/operators/security.rst @@ -64,30 +64,150 @@ you can set a password for your server. :command:`jupyter server password` will prompt you for a password, and store the hashed password in your :file:`jupyter_server_config.json`. -.. versionadded:: 5.0 - - :command:`jupyter server password` command is added. - - It is possible disable authentication altogether by setting the token and password to empty strings, but this is **NOT RECOMMENDED**, unless authentication or access restrictions are handled at a different layer in your web application: .. sourcecode:: python - c.ServerApp.token = '' - c.ServerApp.password = '' + c.ServerApp.token = "" + c.ServerApp.password = "" -Authorization ------------- + +Authentication and Authorization -------------------------------- .. versionadded:: 2.0 +There are two steps to deciding whether to allow a given request to happen. 
+ +The first step is "Authentication" (identifying who is making the request). +This is handled by the :class:`jupyter_server.auth.IdentityProvider`. + +Whether a given user is allowed to take a specific action is called "Authorization", +and is handled separately, by an :class:`~jupyter_server.auth.Authorizer`. + +These two classes may work together, +as the information returned by the IdentityProvider is given to the Authorizer when it makes its decisions. + +Authentication always takes precedence because if no user is authenticated, +no authorization checks need to be made, +as all requests requiring *authorization* must first complete *authentication*. + +Identity Providers +****************** + +The :class:`jupyter_server.auth.IdentityProvider` class is responsible for the "authentication" step, +identifying the user making the request, +and constructing information about them. + +It principally implements two methods. + +.. autoclass:: jupyter_server.auth.IdentityProvider + + .. automethod:: get_user + .. automethod:: identity_model + +The first is :meth:`jupyter_server.auth.IdentityProvider.get_user`. +This method is given a RequestHandler, and is responsible for deciding whether there is an authenticated user making the request. +If the request is authenticated, it should return a :class:`jupyter_server.auth.User` object representing the authenticated user. +It should return None if the request is not authenticated. + +The default implementation accepts token or password authentication. + +This User object will be available as ``self.current_user`` in any request handler. +Request methods decorated with tornado's ``@web.authenticated`` decorator +will only be allowed if this method returns something. + +The User object will be a Python :py:class:`dataclasses.dataclass` - ``jupyter_server.auth.User``: + +.. autoclass:: jupyter_server.auth.User + +A custom IdentityProvider *may* return a custom subclass. 
+ + +The next method an identity provider has is :meth:`~jupyter_server.auth.IdentityProvider.identity_model`. +``identity_model(user)`` is responsible for transforming the user object returned from ``.get_user()`` +into a standard identity model dictionary, +for use in the ``/api/me`` endpoint. + +If your user object is a simple username string or a dict with a ``username`` field, +you may not need to implement this method, as the default implementation will suffice. + +Any required fields missing from the dict returned by this method will be filled-out with defaults. +Only ``username`` is strictly required, if that is all the information the identity provider has available. + +Missing fields will be derived according to: + +- if ``name`` is missing, use ``username`` +- if ``display_name`` is missing, use ``name`` + +Other required fields will be filled with ``None``. + + +Identity Model +^^^^^^^^^^^^^^ + +The identity model is the model accessed at ``/api/me``, and describes the currently authenticated user. + +It has the following fields: + +username + (string) + Unique string identifying the user. + Must be non-empty. +name + (string) + For-humans name of the user. + May be the same as ``username`` in systems where only usernames are available. +display_name + (string) + Alternate rendering of name for display, such as a nickname. + Often the same as ``name``. +initials + (string or null) + Short string of initials. + Initials should not be derived automatically due to localization issues. + May be ``null`` if unavailable. +avatar_url + (string or null) + URL of an avatar image to be used for the user. + May be ``null`` if unavailable. +color + (string or null) + A CSS color string to use as a preferred color, + such as for collaboration cursors. + May be ``null`` if unavailable. + + +The default implementation of the identity provider is stateless, meaning it doesn't store user information +on the server side. 
Instead, it utilizes session cookies to generate and store random user information on the +client side. + +When a user logs in or authenticates, the server generates a session cookie that is stored on the client side. +This session cookie is used to keep track of the identity model between requests. If the client does not +support session cookies or fails to send the cookie in subsequent requests, the server will treat each request +as coming from a new anonymous user and generate a new set of random user information for each request. + +To ensure proper functionality of the identity model and to maintain user context between requests, it's +important for clients to support session cookies and send it in subsequent requests. Failure to do so may +result in the server generating a new anonymous user for each request, leading to loss of user context. + +Authorization +************* + +Authorization is the second step in allowing an action, +after a user has been *authenticated* by the IdentityProvider. + Authorization in Jupyter Server serves to provide finer grained control of access to its API resources. With authentication, requests are accepted if the current user is known by the server. Thus it can restrain access to specific users, but there is no way to give allowed users more or less permissions. Jupyter Server provides a thin and extensible authorization layer which checks if the current user is authorized to make a specific request. +.. autoclass:: jupyter_server.auth.Authorizer + + .. automethod:: is_authorized + This is done by calling a ``is_authorized(handler, user, action, resource)`` method before each request handler. Each request is labeled as either a "read", "write", or "execute" ``action``: @@ -102,8 +222,8 @@ request handler. Each request is labeled as either a "read", "write", or "execut to ~all other permissions via other means. The ``resource`` being accessed refers to the resource name in the Jupyter Server's API endpoints. 
-In most cases, this is matches the field after `/api/`. -For instance, values for ``resource`` in the endpoints provided by the base jupyter server package, +In most cases, this is the field after ``/api/``. +For instance, values for ``resource`` in the endpoints provided by the base Jupyter Server package, and the corresponding permissions: .. list-table:: @@ -196,6 +316,7 @@ follows: from jupyter_server.auth import Authorizer + class MyAuthorizationManager(Authorizer): """Class for authorizing access to resources in the Jupyter Server. @@ -208,7 +329,9 @@ follows: is accepted; if it returns False, the server returns a 403 (Forbidden) error code. """ - def is_authorized(self, handler: JupyterHandler, user: Any, action: str, resource: str) -> bool: + def is_authorized( + self, handler: JupyterHandler, user: Any, action: str, resource: str + ) -> bool: """A method to determine if `user` is authorized to perform `action` (read, write, or execute) on the `resource` type. @@ -231,7 +354,8 @@ follows: The ``is_authorized()`` method will automatically be called whenever a handler is decorated with ``@authorized`` (from ``jupyter_server.auth``), similarly to the -``@authenticated`` decorator for authorization (from ``tornado.web``). +``@authenticated`` decorator for authentication (from ``tornado.web``). + Security in notebook documents ============================== diff --git a/docs/source/other/full-config.rst b/docs/source/other/full-config.rst deleted file mode 100644 index bc5d5af3ef..0000000000 --- a/docs/source/other/full-config.rst +++ /dev/null @@ -1,1394 +0,0 @@ -.. _other-full-config: - - -Config file and command line options -==================================== - -The Jupyter Server can be run with a variety of command line arguments. -A list of available options can be found below in the :ref:`options section -`. - -Defaults for these options can also be set by creating a file named -``jupyter_server_config.py`` in your Jupyter folder. 
The Jupyter -folder is in your home directory, ``~/.jupyter``. - -To create a ``jupyter_server_config.py`` file, with all the defaults -commented out, you can use the following command line:: - - $ jupyter server --generate-config - - -.. _options: - -Options -------- - -This list of options can be generated by running the following and hitting -enter:: - - $ jupyter server --help-all - - - - -Application.log_datefmt : Unicode - Default: ``'%Y-%m-%d %H:%M:%S'`` - - The date format used by logging formatters for %(asctime)s - -Application.log_format : Unicode - Default: ``'[%(name)s]%(highlevel)s %(message)s'`` - - The Logging format template - -Application.log_level : any of ``0``|``10``|``20``|``30``|``40``|``50``|``'DEBUG'``|``'INFO'``|``'WARN'``|``'ERROR'``|``'CRITICAL'`` - Default: ``30`` - - Set the log level by value or name. - -Application.show_config : Bool - Default: ``False`` - - Instead of starting the Application, dump configuration to stdout - -Application.show_config_json : Bool - Default: ``False`` - - Instead of starting the Application, dump configuration to stdout (as JSON) - -JupyterApp.answer_yes : Bool - Default: ``False`` - - Answer yes to any prompts. - -JupyterApp.config_file : Unicode - Default: ``''`` - - Full path of a config file. - -JupyterApp.config_file_name : Unicode - Default: ``''`` - - Specify a config file to load. - -JupyterApp.generate_config : Bool - Default: ``False`` - - Generate default config file. - -JupyterApp.log_datefmt : Unicode - Default: ``'%Y-%m-%d %H:%M:%S'`` - - The date format used by logging formatters for %(asctime)s - -JupyterApp.log_format : Unicode - Default: ``'[%(name)s]%(highlevel)s %(message)s'`` - - The Logging format template - -JupyterApp.log_level : any of ``0``|``10``|``20``|``30``|``40``|``50``|``'DEBUG'``|``'INFO'``|``'WARN'``|``'ERROR'``|``'CRITICAL'`` - Default: ``30`` - - Set the log level by value or name. 
- -JupyterApp.show_config : Bool - Default: ``False`` - - Instead of starting the Application, dump configuration to stdout - -JupyterApp.show_config_json : Bool - Default: ``False`` - - Instead of starting the Application, dump configuration to stdout (as JSON) - -ServerApp.allow_credentials : Bool - Default: ``False`` - - Set the Access-Control-Allow-Credentials: true header - -ServerApp.allow_origin : Unicode - Default: ``''`` - - Set the Access-Control-Allow-Origin header - - Use '*' to allow any origin to access your server. - - Takes precedence over allow_origin_pat. - - -ServerApp.allow_origin_pat : Unicode - Default: ``''`` - - Use a regular expression for the Access-Control-Allow-Origin header - - Requests from an origin matching the expression will get replies with: - - Access-Control-Allow-Origin: origin - - where `origin` is the origin of the request. - - Ignored if allow_origin is set. - - -ServerApp.allow_password_change : Bool - Default: ``True`` - - Allow password to be changed at login for the Jupyter server. - - While logging in with a token, the Jupyter server UI will give the opportunity to - the user to enter a new password at the same time that will replace - the token login mechanism. - - This can be set to false to prevent changing password from the UI/API. - - -ServerApp.allow_remote_access : Bool - Default: ``False`` - - Allow requests where the Host header doesn't point to a local server - - By default, requests get a 403 forbidden response if the 'Host' header - shows that the browser thinks it's on a non-local domain. - Setting this option to True disables this check. - - This protects against 'DNS rebinding' attacks, where a remote web server - serves you a page and then changes its DNS to send later requests to a - local IP, bypassing same-origin checks. - - Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, - along with hostnames configured in local_hostnames. 
- - -ServerApp.allow_root : Bool - Default: ``False`` - - Whether to allow the user to run the server as root. - -ServerApp.answer_yes : Bool - Default: ``False`` - - Answer yes to any prompts. - -ServerApp.base_url : Unicode - Default: ``'/'`` - - The base URL for the Jupyter server. - - Leading and trailing slashes can be omitted, - and will automatically be added. - - -ServerApp.browser : Unicode - Default: ``''`` - - Specify what command to use to invoke a web - browser when starting the server. If not specified, the - default browser will be determined by the `webbrowser` - standard library module, which allows setting of the - BROWSER environment variable to override it. - - -ServerApp.certfile : Unicode - Default: ``''`` - - The full path to an SSL/TLS certificate file. - -ServerApp.client_ca : Unicode - Default: ``''`` - - The full path to a certificate authority certificate for SSL/TLS client authentication. - -ServerApp.config_file : Unicode - Default: ``''`` - - Full path of a config file. - -ServerApp.config_file_name : Unicode - Default: ``''`` - - Specify a config file to load. - -ServerApp.config_manager_class : Type - Default: ``'jupyter_server.services.config.manager.ConfigManager'`` - - The config manager class to use - -ServerApp.contents_manager_class : Type - Default: ``'jupyter_server.services.contents.largefilemanager.LargeFileM...`` - - The content manager class to use. - -ServerApp.cookie_options : Dict - Default: ``{}`` - - Extra keyword arguments to pass to `set_secure_cookie`. See tornado's set_secure_cookie docs for details. - -ServerApp.cookie_secret : Bytes - Default: ``b''`` - - The random bytes used to secure cookies. - By default this is a new random number every time you start the server. - Set it to a value in a config file to enable logins to persist across server sessions. - - Note: Cookie secrets should be kept private, do not share config files with - cookie_secret stored in plaintext (you can read the value from a file). 
- - -ServerApp.cookie_secret_file : Unicode - Default: ``''`` - - The file where the cookie secret is stored. - -ServerApp.custom_display_url : Unicode - Default: ``''`` - - Override URL shown to users. - - Replace actual URL, including protocol, address, port and base URL, - with the given value when displaying URL to the users. Do not change - the actual connection URL. If authentication token is enabled, the - token is added to the custom URL automatically. - - This option is intended to be used when the URL to display to the user - cannot be determined reliably by the Jupyter server (proxified - or containerized setups for example). - -ServerApp.default_url : Unicode - Default: ``'/'`` - - The default URL to redirect to from `/` - -ServerApp.disable_check_xsrf : Bool - Default: ``False`` - - Disable cross-site-request-forgery protection - - Jupyter notebook 4.3.1 introduces protection from cross-site request forgeries, - requiring API requests to either: - - - originate from pages served by this server (validated with XSRF cookie and token), or - - authenticate with a token - - Some anonymous compute resources still desire the ability to run code, - completely without authentication. - These services can disable all authentication and security checks, - with the full knowledge of what that implies. - - -ServerApp.extra_services : List - Default: ``[]`` - - handlers that should be loaded at higher priority than the default services - -ServerApp.extra_static_paths : List - Default: ``[]`` - - Extra paths to search for serving static files. - - This allows adding javascript/css to be available from the Jupyter server machine, - or overriding individual files in the IPython - -ServerApp.extra_template_paths : List - Default: ``[]`` - - Extra paths to search for serving jinja templates. - - Can be used to override templates from jupyter_server.templates. 
- -ServerApp.file_to_run : Unicode - Default: ``''`` - - No description - -ServerApp.generate_config : Bool - Default: ``False`` - - Generate default config file. - -ServerApp.get_secure_cookie_kwargs : Dict - Default: ``{}`` - - Extra keyword arguments to pass to `get_secure_cookie`. See tornado's get_secure_cookie docs for details. - -ServerApp.iopub_data_rate_limit : Float - Default: ``1000000`` - - (bytes/sec) - Maximum rate at which stream output can be sent on iopub before they are - limited. - -ServerApp.iopub_msg_rate_limit : Float - Default: ``1000`` - - (msgs/sec) - Maximum rate at which messages can be sent on iopub before they are - limited. - -ServerApp.ip : Unicode - Default: ``'localhost'`` - - The IP address the Jupyter server will listen on. - -ServerApp.jinja_environment_options : Dict - Default: ``{}`` - - Supply extra arguments that will be passed to Jinja environment. - -ServerApp.jinja_template_vars : Dict - Default: ``{}`` - - Extra variables to supply to jinja templates when rendering. - -ServerApp.jpserver_extensions : Dict - Default: ``{}`` - - Dict of Python modules to load as notebook server extensions.Entry values can be used to enable and disable the loading ofthe extensions. The extensions will be loaded in alphabetical order. - -ServerApp.kernel_manager_class : Type - Default: ``'jupyter_server.services.kernels.kernelmanager.MappingKernelM...`` - - The kernel manager class to use. - -ServerApp.kernel_spec_manager_class : Type - Default: ``'jupyter_client.kernelspec.KernelSpecManager'`` - - - The kernel spec manager class to use. Should be a subclass - of `jupyter_client.kernelspec.KernelSpecManager`. - - The Api of KernelSpecManager is provisional and might change - without warning between this version of Jupyter and the next stable one. - - -ServerApp.keyfile : Unicode - Default: ``''`` - - The full path to a private key file for usage with SSL/TLS. 
- -ServerApp.local_hostnames : List - Default: ``['localhost']`` - - Hostnames to allow as local when allow_remote_access is False. - - Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted - as local as well. - - -ServerApp.log_datefmt : Unicode - Default: ``'%Y-%m-%d %H:%M:%S'`` - - The date format used by logging formatters for %(asctime)s - -ServerApp.log_format : Unicode - Default: ``'[%(name)s]%(highlevel)s %(message)s'`` - - The Logging format template - -ServerApp.log_level : any of ``0``|``10``|``20``|``30``|``40``|``50``|``'DEBUG'``|``'INFO'``|``'WARN'``|``'ERROR'``|``'CRITICAL'`` - Default: ``30`` - - Set the log level by value or name. - -ServerApp.login_handler_class : Type - Default: ``'jupyter_server.auth.login.LoginHandler'`` - - The login handler class to use. - -ServerApp.logout_handler_class : Type - Default: ``'jupyter_server.auth.logout.LogoutHandler'`` - - The logout handler class to use. - -ServerApp.max_body_size : Int - Default: ``536870912`` - - - Sets the maximum allowed size of the client request body, specified in - the Content-Length request header field. If the size in a request - exceeds the configured value, a malformed HTTP message is returned to - the client. - - Note: max_body_size is applied even in streaming mode. - - -ServerApp.max_buffer_size : Int - Default: ``536870912`` - - - Gets or sets the maximum amount of memory, in bytes, that is allocated - for use by the buffer manager. - - -ServerApp.notebook_dir : Unicode - Default: ``''`` - - DEPRECATED, use root_dir. - -ServerApp.open_browser : Bool - Default: ``False`` - - Whether to open in a browser after starting. - The specific browser used is platform dependent and - determined by the python standard library `webbrowser` - module, unless it is overridden using the --browser - (ServerApp.browser) configuration option. - - -ServerApp.password : Unicode - Default: ``''`` - - Hashed password to use for web authentication. 
- - To generate, type in a python/IPython shell: - - from jupyter_server.auth import passwd; passwd() - - The string should be of the form type:salt:hashed-password. - - -ServerApp.password_required : Bool - Default: ``False`` - - Forces users to use a password for the Jupyter server. - This is useful in a multi user environment, for instance when - everybody in the LAN can access each other's machine through ssh. - - In such a case, serving on localhost is not secure since - any user can connect to the Jupyter server via ssh. - - - -ServerApp.port : Int - Default: ``8888`` - - The port the Jupyter server will listen on. - -ServerApp.port_retries : Int - Default: ``50`` - - The number of additional ports to try if the specified port is not available. - -ServerApp.pylab : Unicode - Default: ``'disabled'`` - - - DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. - - -ServerApp.quit_button : Bool - Default: ``True`` - - If True, display controls to shut down the Jupyter server, such as menu items or buttons. - -ServerApp.rate_limit_window : Float - Default: ``3`` - - (sec) Time window used to - check the message and data rate limits. - -ServerApp.reraise_server_extension_failures : Bool - Default: ``False`` - - Reraise exceptions encountered loading server extensions? - -ServerApp.root_dir : Unicode - Default: ``''`` - - The directory to use for notebooks and kernels. - -ServerApp.session_manager_class : Type - Default: ``'jupyter_server.services.sessions.sessionmanager.SessionManager'`` - - The session manager class to use. - -ServerApp.show_config : Bool - Default: ``False`` - - Instead of starting the Application, dump configuration to stdout - -ServerApp.show_config_json : Bool - Default: ``False`` - - Instead of starting the Application, dump configuration to stdout (as JSON) - -ServerApp.shutdown_no_activity_timeout : Int - Default: ``0`` - - Shut down the server after N seconds with no kernels or terminals running - and no activity. 
This can be used together with culling idle kernels - (MappingKernelManager.cull_idle_timeout) to shutdown the Jupyter server - when it's not in use. This is not precisely timed: it may shut down up to - a minute later. 0 (the default) disables this automatic shutdown. - -ServerApp.ssl_options : Dict - Default: ``{}`` - - Supply SSL options for the tornado HTTPServer. - See the tornado docs for details. - -ServerApp.terminado_settings : Dict - Default: ``{}`` - - Supply overrides for terminado. Currently only supports "shell_command". - -ServerApp.terminals_enabled : Bool - Default: ``True`` - - Set to False to disable terminals. - - This does *not* make the server more secure by itself. - Anything the user can in a terminal, they can also do in a notebook. - - Terminals may also be automatically disabled if the terminado package - is not available. - - -ServerApp.token : Unicode - Default: ``''`` - - Token used for authenticating first-time connections to the server. - - When no password is enabled, - the default is to generate a new, random token. - - Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED. - - -ServerApp.tornado_settings : Dict - Default: ``{}`` - - Supply overrides for the tornado.web.Application that the Jupyter server uses. - -ServerApp.trust_xheaders : Bool - Default: ``False`` - - Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headerssent by the upstream reverse proxy. Necessary if the proxy handles SSL - -ServerApp.webbrowser_open_new : Int - Default: ``2`` - - Specify where to open the server on startup. This is the - `new` argument passed to the standard library method `webbrowser.open`. - The behaviour is not guaranteed, but depends on browser support. Valid - values are: - - - 2 opens a new tab, - - 1 opens a new window, - - 0 opens in an existing window. - - See the `webbrowser.open` documentation for details. 
- - -ServerApp.websocket_compression_options : Any - Default: ``None`` - - - Set the tornado compression options for websocket connections. - - This value will be returned from :meth:`WebSocketHandler.get_compression_options`. - None (default) will disable compression. - A dict (even an empty one) will enable compression. - - See the tornado docs for WebSocketHandler.get_compression_options for details. - - -ServerApp.websocket_url : Unicode - Default: ``''`` - - The base URL for websockets, - if it differs from the HTTP server (hint: it almost certainly doesn't). - - Should be in the form of an HTTP origin: ws[s]://hostname[:port] - - -ConnectionFileMixin.connection_file : Unicode - Default: ``''`` - - JSON file in which to store connection info [default: kernel-.json] - - This file will contain the IP, ports, and authentication key needed to connect - clients to this kernel. By default, this file will be created in the security dir - of the current profile, but can be specified by absolute path. - - -ConnectionFileMixin.control_port : Int - Default: ``0`` - - set the control (ROUTER) port [default: random] - -ConnectionFileMixin.hb_port : Int - Default: ``0`` - - set the heartbeat port [default: random] - -ConnectionFileMixin.iopub_port : Int - Default: ``0`` - - set the iopub (PUB) port [default: random] - -ConnectionFileMixin.ip : Unicode - Default: ``''`` - - Set the kernel's IP address [default localhost]. - If the IP address is something other than localhost, then - Consoles on other machines will be able to connect - to the Kernel, so be careful! 
- -ConnectionFileMixin.shell_port : Int - Default: ``0`` - - set the shell (ROUTER) port [default: random] - -ConnectionFileMixin.stdin_port : Int - Default: ``0`` - - set the stdin (ROUTER) port [default: random] - -ConnectionFileMixin.transport : any of ``'tcp'``|``'ipc'`` (case-insensitive) - Default: ``'tcp'`` - - No description - -KernelManager.autorestart : Bool - Default: ``True`` - - Should we autorestart the kernel if it dies. - -KernelManager.connection_file : Unicode - Default: ``''`` - - JSON file in which to store connection info [default: kernel-.json] - - This file will contain the IP, ports, and authentication key needed to connect - clients to this kernel. By default, this file will be created in the security dir - of the current profile, but can be specified by absolute path. - - -KernelManager.control_port : Int - Default: ``0`` - - set the control (ROUTER) port [default: random] - -KernelManager.hb_port : Int - Default: ``0`` - - set the heartbeat port [default: random] - -KernelManager.iopub_port : Int - Default: ``0`` - - set the iopub (PUB) port [default: random] - -KernelManager.ip : Unicode - Default: ``''`` - - Set the kernel's IP address [default localhost]. - If the IP address is something other than localhost, then - Consoles on other machines will be able to connect - to the Kernel, so be careful! - -KernelManager.kernel_cmd : List - Default: ``[]`` - - DEPRECATED: Use kernel_name instead. - - The Popen Command to launch the kernel. - Override this if you have a custom kernel. - If kernel_cmd is specified in a configuration file, - Jupyter does not pass any arguments to the kernel, - because it cannot make any assumptions about the - arguments that the kernel understands. In particular, - this means that the kernel does not receive the - option --debug if it given on the Jupyter command line. 
- - -KernelManager.shell_port : Int - Default: ``0`` - - set the shell (ROUTER) port [default: random] - -KernelManager.shutdown_wait_time : Float - Default: ``5.0`` - - Time to wait for a kernel to terminate before killing it, in seconds. - -KernelManager.stdin_port : Int - Default: ``0`` - - set the stdin (ROUTER) port [default: random] - -KernelManager.transport : any of ``'tcp'``|``'ipc'`` (case-insensitive) - Default: ``'tcp'`` - - No description - -Session.buffer_threshold : Int - Default: ``1024`` - - Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling. - -Session.check_pid : Bool - Default: ``True`` - - Whether to check PID to protect against calls after fork. - - This check can be disabled if fork-safety is handled elsewhere. - - -Session.copy_threshold : Int - Default: ``65536`` - - Threshold (in bytes) beyond which a buffer should be sent without copying. - -Session.debug : Bool - Default: ``False`` - - Debug output in the Session - -Session.digest_history_size : Int - Default: ``65536`` - - The maximum number of digests to remember. - - The digest history will be culled when it exceeds this value. - - -Session.item_threshold : Int - Default: ``64`` - - The maximum number of items for a container to be introspected for custom serialization. - Containers larger than this are pickled outright. - - -Session.key : CBytes - Default: ``b''`` - - execution key, for signing messages. - -Session.keyfile : Unicode - Default: ``''`` - - path to file containing execution key. - -Session.metadata : Dict - Default: ``{}`` - - Metadata dictionary, which serves as the default top-level metadata dict for each message. - -Session.packer : DottedObjectName - Default: ``'json'`` - - The name of the packer for serializing messages. - Should be one of 'json', 'pickle', or an import name - for a custom callable serializer. - -Session.session : CUnicode - Default: ``''`` - - The UUID identifying this session. 
- -Session.signature_scheme : Unicode - Default: ``'hmac-sha256'`` - - The digest scheme used to construct the message signatures. - Must have the form 'hmac-HASH'. - -Session.unpacker : DottedObjectName - Default: ``'json'`` - - The name of the unpacker for unserializing messages. - Only used with custom functions for `packer`. - -Session.username : Unicode - Default: ``'username'`` - - Username for the Session. Default is your system username. - -MultiKernelManager.default_kernel_name : Unicode - Default: ``'python3'`` - - The name of the default kernel to start - -MultiKernelManager.kernel_manager_class : DottedObjectName - Default: ``'jupyter_client.ioloop.IOLoopKernelManager'`` - - The kernel manager class. This is configurable to allow - subclassing of the KernelManager for customized behavior. - - -MultiKernelManager.shared_context : Bool - Default: ``True`` - - Share a single zmq.Context to talk to all my kernels - -MappingKernelManager.allow_tracebacks : Bool - Default: ``True`` - - Whether to send tracebacks to clients on exceptions. - -MappingKernelManager.allowed_message_types : List - Default: ``[]`` - - White list of allowed kernel message types. - When the list is empty, all message types are allowed. - - -MappingKernelManager.buffer_offline_messages : Bool - Default: ``True`` - - Whether messages from kernels whose frontends have disconnected should be buffered in-memory. - - When True (default), messages are buffered and replayed on reconnect, - avoiding lost messages due to interrupted connectivity. - - Disable if long-running kernels will produce too much output while - no frontends are connected. - - -MappingKernelManager.cull_busy : Bool - Default: ``False`` - - Whether to consider culling kernels which are busy. - Only effective if cull_idle_timeout > 0. - -MappingKernelManager.cull_connected : Bool - Default: ``False`` - - Whether to consider culling kernels which have one or more connections. - Only effective if cull_idle_timeout > 0. 
- -MappingKernelManager.cull_idle_timeout : Int - Default: ``0`` - - Timeout (in seconds) after which a kernel is considered idle and ready to be culled. - Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled - for users with poor network connections. - -MappingKernelManager.cull_interval : Int - Default: ``300`` - - The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value. - -MappingKernelManager.default_kernel_name : Unicode - Default: ``'python3'`` - - The name of the default kernel to start - -MappingKernelManager.kernel_info_timeout : Float - Default: ``60`` - - Timeout for giving up on a kernel (in seconds). - - On starting and restarting kernels, we check whether the - kernel is running and responsive by sending kernel_info_requests. - This sets the timeout in seconds for how long the kernel can take - before being presumed dead. - This affects the MappingKernelManager (which handles kernel restarts) - and the ZMQChannelsHandler (which handles the startup). - - -MappingKernelManager.kernel_manager_class : DottedObjectName - Default: ``'jupyter_client.ioloop.IOLoopKernelManager'`` - - The kernel manager class. This is configurable to allow - subclassing of the KernelManager for customized behavior. - - -MappingKernelManager.root_dir : Unicode - Default: ``''`` - - No description - -MappingKernelManager.shared_context : Bool - Default: ``True`` - - Share a single zmq.Context to talk to all my kernels - -MappingKernelManager.traceback_replacement_message : Unicode - Default: ``'An exception occurred at runtime, which is not shown due to ...`` - - Message to print when allow_tracebacks is False, and an exception occurs - -KernelSpecManager.ensure_native_kernel : Bool - Default: ``True`` - - If there is no Python kernelspec registered and the IPython - kernel is available, ensure it is added to the spec list. 
- - -KernelSpecManager.kernel_spec_class : Type - Default: ``'jupyter_client.kernelspec.KernelSpec'`` - - The kernel spec class. This is configurable to allow - subclassing of the KernelSpecManager for customized behavior. - - -KernelSpecManager.whitelist : Set - Default: ``set()`` - - Whitelist of allowed kernel names. - - By default, all installed kernels are allowed. - - -ContentsManager.allow_hidden : Bool - Default: ``False`` - - Allow access to hidden files - -ContentsManager.checkpoints : Instance - Default: ``None`` - - No description - -ContentsManager.checkpoints_class : Type - Default: ``'jupyter_server.services.contents.checkpoints.Checkpoints'`` - - No description - -ContentsManager.checkpoints_kwargs : Dict - Default: ``{}`` - - No description - -ContentsManager.files_handler_class : Type - Default: ``'jupyter_server.files.handlers.FilesHandler'`` - - handler class to use when serving raw file requests. - - Default is a fallback that talks to the ContentsManager API, - which may be inefficient, especially for large files. - - Local files-based ContentsManagers can use a StaticFileHandler subclass, - which will be much more efficient. - - Access to these files should be Authenticated. - - -ContentsManager.files_handler_params : Dict - Default: ``{}`` - - Extra parameters to pass to files_handler_class. - - For example, StaticFileHandlers generally expect a `path` argument - specifying the root directory from which to serve files. - - -ContentsManager.hide_globs : List - Default: ``['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dyl...`` - - - Glob patterns to hide in file and directory listings. - - -ContentsManager.pre_save_hook : Any - Default: ``None`` - - Python callable or importstring thereof - - To be called on a contents model prior to save. - - This can be used to process the structure, - such as removing notebook outputs or other side effects that - should not be saved. 
- - It will be called as (all arguments passed by keyword):: - - hook(path=path, model=model, contents_manager=self) - - - model: the model to be saved. Includes file contents. - Modifying this dict will affect the file that is stored. - - path: the API path of the save destination - - contents_manager: this ContentsManager instance - - -ContentsManager.root_dir : Unicode - Default: ``'/'`` - - No description - -ContentsManager.untitled_directory : Unicode - Default: ``'Untitled Folder'`` - - The base name used when creating untitled directories. - -ContentsManager.untitled_file : Unicode - Default: ``'untitled'`` - - The base name used when creating untitled files. - -ContentsManager.untitled_notebook : Unicode - Default: ``'Untitled'`` - - The base name used when creating untitled notebooks. - -FileManagerMixin.use_atomic_writing : Bool - Default: ``True`` - - By default notebooks are saved on disk on a temporary file and then if succefully written, it replaces the old ones. - This procedure, namely 'atomic_writing', causes some bugs on file system whitout operation order enforcement (like some networked fs). - If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota ) - -FileContentsManager.allow_hidden : Bool - Default: ``False`` - - Allow access to hidden files - -FileContentsManager.checkpoints : Instance - Default: ``None`` - - No description - -FileContentsManager.checkpoints_class : Type - Default: ``'jupyter_server.services.contents.checkpoints.Checkpoints'`` - - No description - -FileContentsManager.checkpoints_kwargs : Dict - Default: ``{}`` - - No description - -FileContentsManager.delete_to_trash : Bool - Default: ``True`` - - If True (default), deleting files will send them to the - platform's trash/recycle bin, where they can be recovered. If False, - deleting files really deletes them. 
- -FileContentsManager.files_handler_class : Type - Default: ``'jupyter_server.files.handlers.FilesHandler'`` - - handler class to use when serving raw file requests. - - Default is a fallback that talks to the ContentsManager API, - which may be inefficient, especially for large files. - - Local files-based ContentsManagers can use a StaticFileHandler subclass, - which will be much more efficient. - - Access to these files should be Authenticated. - - -FileContentsManager.files_handler_params : Dict - Default: ``{}`` - - Extra parameters to pass to files_handler_class. - - For example, StaticFileHandlers generally expect a `path` argument - specifying the root directory from which to serve files. - - -FileContentsManager.hide_globs : List - Default: ``['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dyl...`` - - - Glob patterns to hide in file and directory listings. - - -FileContentsManager.post_save_hook : Any - Default: ``None`` - - Python callable or importstring thereof - - to be called on the path of a file just saved. - - This can be used to process the file on disk, - such as converting the notebook to a script or HTML via nbconvert. - - It will be called as (all arguments passed by keyword):: - - hook(os_path=os_path, model=model, contents_manager=instance) - - - path: the filesystem path to the file just written - - model: the model representing the file - - contents_manager: this ContentsManager instance - - -FileContentsManager.pre_save_hook : Any - Default: ``None`` - - Python callable or importstring thereof - - To be called on a contents model prior to save. - - This can be used to process the structure, - such as removing notebook outputs or other side effects that - should not be saved. - - It will be called as (all arguments passed by keyword):: - - hook(path=path, model=model, contents_manager=self) - - - model: the model to be saved. Includes file contents. - Modifying this dict will affect the file that is stored. 
- - path: the API path of the save destination - - contents_manager: this ContentsManager instance - - -FileContentsManager.root_dir : Unicode - Default: ``''`` - - No description - -FileContentsManager.untitled_directory : Unicode - Default: ``'Untitled Folder'`` - - The base name used when creating untitled directories. - -FileContentsManager.untitled_file : Unicode - Default: ``'untitled'`` - - The base name used when creating untitled files. - -FileContentsManager.untitled_notebook : Unicode - Default: ``'Untitled'`` - - The base name used when creating untitled notebooks. - -FileContentsManager.use_atomic_writing : Bool - Default: ``True`` - - By default notebooks are saved on disk on a temporary file and then if succefully written, it replaces the old ones. - This procedure, namely 'atomic_writing', causes some bugs on file system whitout operation order enforcement (like some networked fs). - If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota ) - -NotebookNotary.algorithm : any of ``'blake2s'``|``'sha512'``|``'md5'``|``'sha3_512'``|``'sha3_224'``|``'blake2b'``|``'sha384'``|``'sha1'``|``'sha3_256'``|``'sha256'``|``'sha224'``|``'sha3_384'`` - Default: ``'sha256'`` - - The hashing algorithm used to sign notebooks. - -NotebookNotary.db_file : Unicode - Default: ``''`` - - The sqlite file in which to store notebook signatures. - By default, this will be in your Jupyter data directory. - You can set it to ':memory:' to disable sqlite writing to the filesystem. - - -NotebookNotary.secret : Bytes - Default: ``b''`` - - The secret key with which notebooks are signed. - -NotebookNotary.secret_file : Unicode - Default: ``''`` - - The file where the secret key is stored. - -NotebookNotary.store_factory : Callable - Default: ``traitlets.Undefined`` - - A callable returning the storage backend for notebook signatures. - The default uses an SQLite database. 
- -GatewayMappingKernelManager.allow_tracebacks : Bool - Default: ``True`` - - Whether to send tracebacks to clients on exceptions. - -GatewayMappingKernelManager.allowed_message_types : List - Default: ``[]`` - - White list of allowed kernel message types. - When the list is empty, all message types are allowed. - - -GatewayMappingKernelManager.buffer_offline_messages : Bool - Default: ``True`` - - Whether messages from kernels whose frontends have disconnected should be buffered in-memory. - - When True (default), messages are buffered and replayed on reconnect, - avoiding lost messages due to interrupted connectivity. - - Disable if long-running kernels will produce too much output while - no frontends are connected. - - -GatewayMappingKernelManager.cull_busy : Bool - Default: ``False`` - - Whether to consider culling kernels which are busy. - Only effective if cull_idle_timeout > 0. - -GatewayMappingKernelManager.cull_connected : Bool - Default: ``False`` - - Whether to consider culling kernels which have one or more connections. - Only effective if cull_idle_timeout > 0. - -GatewayMappingKernelManager.cull_idle_timeout : Int - Default: ``0`` - - Timeout (in seconds) after which a kernel is considered idle and ready to be culled. - Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled - for users with poor network connections. - -GatewayMappingKernelManager.cull_interval : Int - Default: ``300`` - - The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value. - -GatewayMappingKernelManager.default_kernel_name : Unicode - Default: ``'python3'`` - - The name of the default kernel to start - -GatewayMappingKernelManager.kernel_info_timeout : Float - Default: ``60`` - - Timeout for giving up on a kernel (in seconds). - - On starting and restarting kernels, we check whether the - kernel is running and responsive by sending kernel_info_requests. 
- This sets the timeout in seconds for how long the kernel can take - before being presumed dead. - This affects the MappingKernelManager (which handles kernel restarts) - and the ZMQChannelsHandler (which handles the startup). - - -GatewayMappingKernelManager.kernel_manager_class : DottedObjectName - Default: ``'jupyter_client.ioloop.IOLoopKernelManager'`` - - The kernel manager class. This is configurable to allow - subclassing of the KernelManager for customized behavior. - - -GatewayMappingKernelManager.root_dir : Unicode - Default: ``''`` - - No description - -GatewayMappingKernelManager.shared_context : Bool - Default: ``True`` - - Share a single zmq.Context to talk to all my kernels - -GatewayMappingKernelManager.traceback_replacement_message : Unicode - Default: ``'An exception occurred at runtime, which is not shown due to ...`` - - Message to print when allow_tracebacks is False, and an exception occurs - -GatewayKernelSpecManager.ensure_native_kernel : Bool - Default: ``True`` - - If there is no Python kernelspec registered and the IPython - kernel is available, ensure it is added to the spec list. - - -GatewayKernelSpecManager.kernel_spec_class : Type - Default: ``'jupyter_client.kernelspec.KernelSpec'`` - - The kernel spec class. This is configurable to allow - subclassing of the KernelSpecManager for customized behavior. - - -GatewayKernelSpecManager.whitelist : Set - Default: ``set()`` - - Whitelist of allowed kernel names. - - By default, all installed kernels are allowed. - - - - -GatewayClient.auth_token : Unicode - Default: ``None`` - - The authorization token used in the HTTP headers. (JUPYTER_GATEWAY_AUTH_TOKEN env var) - - -GatewayClient.ca_certs : Unicode - Default: ``None`` - - The filename of CA certificates or None to use defaults. (JUPYTER_GATEWAY_CA_CERTS env var) - - -GatewayClient.client_cert : Unicode - Default: ``None`` - - The filename for client SSL certificate, if any. 
(JUPYTER_GATEWAY_CLIENT_CERT env var) - - -GatewayClient.client_key : Unicode - Default: ``None`` - - The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var) - - -GatewayClient.connect_timeout : Float - Default: ``60.0`` - - The time allowed for HTTP connection establishment with the Gateway server. - (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var) - -GatewayClient.env_whitelist : Unicode - Default: ``''`` - - A comma-separated list of environment variable names that will be included, along with - their values, in the kernel startup request. The corresponding `env_whitelist` configuration - value must also be set on the Gateway server - since that configuration value indicates which - environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var) - -GatewayClient.headers : Unicode - Default: ``'{}'`` - - Additional HTTP headers to pass on the request. This value will be converted to a dict. - (JUPYTER_GATEWAY_HEADERS env var) - - -GatewayClient.http_pwd : Unicode - Default: ``None`` - - The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var) - - -GatewayClient.http_user : Unicode - Default: ``None`` - - The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var) - - -GatewayClient.kernels_endpoint : Unicode - Default: ``'/api/kernels'`` - - The gateway API endpoint for accessing kernel resources (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var) - -GatewayClient.kernelspecs_endpoint : Unicode - Default: ``'/api/kernelspecs'`` - - The gateway API endpoint for accessing kernelspecs (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var) - -GatewayClient.kernelspecs_resource_endpoint : Unicode - Default: ``'/kernelspecs'`` - - The gateway endpoint for accessing kernelspecs resources - (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var) - -GatewayClient.request_timeout : Float - Default: ``60.0`` - - The time allowed for HTTP request completion. 
(JUPYTER_GATEWAY_REQUEST_TIMEOUT env var) - -GatewayClient.url : Unicode - Default: ``None`` - - The url of the Kernel or Enterprise Gateway server where - kernel specifications are defined and kernel management takes place. - If defined, this Notebook server acts as a proxy for all kernel - management and kernel specification retrieval. (JUPYTER_GATEWAY_URL env var) - - -GatewayClient.validate_cert : Bool - Default: ``True`` - - For HTTPS requests, determines if server's certificate should be validated or not. - (JUPYTER_GATEWAY_VALIDATE_CERT env var) - -GatewayClient.ws_url : Unicode - Default: ``None`` - - The websocket url of the Kernel or Enterprise Gateway server. If not provided, this value - will correspond to the value of the Gateway url with 'ws' in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var) diff --git a/docs/source/users/configuration.rst b/docs/source/users/configuration.rst index 8db840c375..696cba7292 100644 --- a/docs/source/users/configuration.rst +++ b/docs/source/users/configuration.rst @@ -24,7 +24,7 @@ By default, Jupyter Server looks for server-specific configuration in a ``jupyte /Users/username/Library/Jupyter/runtime -The paths under ``config`` are listed in order of precedence. If the same trait is listed in multiple places, it will be set to the value from the file will highest precendence. +The paths under ``config`` are listed in order of precedence. If the same trait is listed in multiple places, it will be set to the value from the file with the highest precedence. Jupyter Server uses IPython's traitlets system for configuration. Traits can be diff --git a/examples/authorization/README.md b/examples/authorization/README.md index 28fe0df83f..1d9d27814b 100644 --- a/examples/authorization/README.md +++ b/examples/authorization/README.md @@ -3,8 +3,8 @@ This folder contains the following examples: 1. a "read-only" Jupyter Notebook Server -2. a read/write Server without the ability to execute code on kernels. -3. 
a "temporary notebook server", i.e. read and execute notebooks but cannot save/write files. +1. a read/write Server without the ability to execute code on kernels. +1. a "temporary notebook server", i.e. read and execute notebooks but cannot save/write files. ## How does it work? @@ -31,6 +31,7 @@ class MyCustomAuthorizer(Authorizer): return True + # Pass this custom class to Jupyter Server c.ServerApp.authorizer_class = MyCustomAuthorizer ``` @@ -45,13 +46,13 @@ In the `jupyter_nbclassic_readonly_config.py` pip install nbclassic -2. Navigate to the jupyter_authorized_server `examples/` folder. +1. Navigate to the jupyter_authorized_server `examples/` folder. -3. Launch nbclassic and load `jupyter_nbclassic_readonly_config.py`: +1. Launch nbclassic and load `jupyter_nbclassic_readonly_config.py`: jupyter nbclassic --config=jupyter_nbclassic_readonly_config.py -4. Try creating a notebook, running a notebook in a cell, etc. You should see a `403: Forbidden` error. +1. Try creating a notebook, running a notebook in a cell, etc. You should see a `403: Forbidden` error. ### Read+Write example @@ -59,13 +60,13 @@ In the `jupyter_nbclassic_readonly_config.py` pip install nbclassic -2. Navigate to the jupyter_authorized_server `examples/` folder. +1. Navigate to the jupyter_authorized_server `examples/` folder. -3. Launch nbclassic and load `jupyter_nbclassic_rw_config.py`: +1. Launch nbclassic and load `jupyter_nbclassic_rw_config.py`: jupyter nbclassic --config=jupyter_nbclassic_rw_config.py -4. Try running a cell in a notebook. You should see a `403: Forbidden` error. +1. Try running a cell in a notebook. You should see a `403: Forbidden` error. ### Temporary notebook server example @@ -75,10 +76,10 @@ This configuration allows everything except saving files. pip install nbclassic -2. Navigate to the jupyter_authorized_server `examples/` folder. +1. Navigate to the jupyter_authorized_server `examples/` folder. -3. 
Launch nbclassic and load `jupyter_temporary_config.py`: +1. Launch nbclassic and load `jupyter_temporary_config.py`: jupyter nbclassic --config=jupyter_temporary_config.py -4. Edit a notebook, run a cell, etc. Everything works fine. Then try to save your changes... you should see a `403: Forbidden` error. +1. Edit a notebook, run a cell, etc. Everything works fine. Then try to save your changes... you should see a `403: Forbidden` error. diff --git a/examples/authorization/jupyter_nbclassic_readonly_config.py b/examples/authorization/jupyter_nbclassic_readonly_config.py index 292644c284..18c1b83bfd 100644 --- a/examples/authorization/jupyter_nbclassic_readonly_config.py +++ b/examples/authorization/jupyter_nbclassic_readonly_config.py @@ -1,3 +1,4 @@ +"""Nbclassic read only auth example.""" from jupyter_server.auth import Authorizer @@ -11,4 +12,4 @@ def is_authorized(self, handler, user, action, resource): return True -c.ServerApp.authorizer_class = ReadOnly +c.ServerApp.authorizer_class = ReadOnly # type:ignore[name-defined] diff --git a/examples/authorization/jupyter_nbclassic_rw_config.py b/examples/authorization/jupyter_nbclassic_rw_config.py index 261efcf984..5dce9a8444 100644 --- a/examples/authorization/jupyter_nbclassic_rw_config.py +++ b/examples/authorization/jupyter_nbclassic_rw_config.py @@ -1,3 +1,4 @@ +"""Nbclassic read/write auth example.""" from jupyter_server.auth import Authorizer @@ -11,4 +12,4 @@ def is_authorized(self, handler, user, action, resource): return True -c.ServerApp.authorizer_class = ReadWriteOnly +c.ServerApp.authorizer_class = ReadWriteOnly # type:ignore[name-defined] diff --git a/examples/authorization/jupyter_temporary_config.py b/examples/authorization/jupyter_temporary_config.py index e1bd2fb507..dd93948c4c 100644 --- a/examples/authorization/jupyter_temporary_config.py +++ b/examples/authorization/jupyter_temporary_config.py @@ -1,3 +1,4 @@ +"""Nbclassic temporary server auth example.""" from jupyter_server.auth import 
Authorizer @@ -11,4 +12,4 @@ def is_authorized(self, handler, user, action, resource): return True -c.ServerApp.authorizer_class = TemporaryServerPersonality +c.ServerApp.authorizer_class = TemporaryServerPersonality # type:ignore[name-defined] diff --git a/examples/identity/system_password/README.md b/examples/identity/system_password/README.md new file mode 100644 index 0000000000..a4064a80b4 --- /dev/null +++ b/examples/identity/system_password/README.md @@ -0,0 +1,7 @@ +# Jupyter login with system password + +This `jupyter_server_config.py` defines and enables a `SystemPasswordIdentityProvider`. +This IdentityProvider checks the entered password against your system password using PAM. +Only the current user's password (the user the server is running as) is accepted. + +The result is a User whose name matches the system user, rather than a randomly generated one. diff --git a/examples/identity/system_password/jupyter_server_config.py b/examples/identity/system_password/jupyter_server_config.py new file mode 100644 index 0000000000..11364c22e8 --- /dev/null +++ b/examples/identity/system_password/jupyter_server_config.py @@ -0,0 +1,33 @@ +"""Jupyter server system password identity provider example.""" +import pwd +from getpass import getuser + +from pamela import PAMError, authenticate + +from jupyter_server.auth.identity import IdentityProvider, User + + +class SystemPasswordIdentityProvider(IdentityProvider): + """A system password identity provider.""" + + # no need to generate a default token (token can still be used, but it's opt-in) + need_token = False + + def process_login_form(self, handler): + """Process a login form.""" + username = getuser() + password = handler.get_argument("password", "") + try: + authenticate(username, password) + except PAMError as e: + self.log.error(f"Failed login for {username}: {e}") + return None + + user_info = pwd.getpwnam(username) + # get human name from pwd, if not empty + return User(username=username, 
name=user_info.pw_gecos or username) + + +c = get_config() # type: ignore[name-defined] + +c.ServerApp.identity_provider_class = SystemPasswordIdentityProvider diff --git a/examples/simple/README.md b/examples/simple/README.md index 2652c2da4f..dd76af4ded 100644 --- a/examples/simple/README.md +++ b/examples/simple/README.md @@ -10,7 +10,7 @@ You need `python3` to build and run the server extensions. # Clone, create a conda env and install from source. git clone https://github.com/jupyter/jupyter_server && \ cd examples/simple && \ - conda create -y -n jupyter-server-example python=3.7 && \ + conda create -y -n jupyter-server-example python=3.9 && \ conda activate jupyter-server-example && \ pip install -e .[test] ``` @@ -137,7 +137,7 @@ The content of the Config is based on the trait you have defined via the `CLI` a ## Only Extension 2 -Now stop agin the server and start with only `Extension 2`. +Now stop again the server and start with only `Extension 2`. ```bash # Start the jupyter server extension simple_ext2, it will NOT load simple_ext1 because of load_other_extensions = False. diff --git a/examples/simple/conftest.py b/examples/simple/conftest.py index c661b537d4..df81e57d34 100644 --- a/examples/simple/conftest.py +++ b/examples/simple/conftest.py @@ -1 +1,2 @@ +"""Pytest configuration.""" pytest_plugins = ["jupyter_server.pytest_plugin"] diff --git a/examples/simple/jupyter_server_config.py b/examples/simple/jupyter_server_config.py index 723d6cdadb..d850655359 100644 --- a/examples/simple/jupyter_server_config.py +++ b/examples/simple/jupyter_server_config.py @@ -1,6 +1,8 @@ -# Configuration file for jupyter-server extensions. 
+"""Configuration file for jupyter-server extensions.""" # ------------------------------------------------------------------------------ # Application(SingletonConfigurable) configuration # ------------------------------------------------------------------------------ # The date format used by logging formatters for %(asctime)s -c.Application.log_datefmt = "%Y-%m-%d %H:%M:%S Simple_Extensions_Example" +c.Application.log_datefmt = ( # type:ignore[name-defined] + "%Y-%m-%d %H:%M:%S Simple_Extensions_Example" +) diff --git a/examples/simple/jupyter_simple_ext11_config.py b/examples/simple/jupyter_simple_ext11_config.py index d2baa1360a..976e3df66a 100644 --- a/examples/simple/jupyter_simple_ext11_config.py +++ b/examples/simple/jupyter_simple_ext11_config.py @@ -1 +1,2 @@ -c.SimpleApp11.ignore_js = True +"""Jupyter server config.""" +c.SimpleApp11.ignore_js = True # type:ignore[name-defined] diff --git a/examples/simple/jupyter_simple_ext1_config.py b/examples/simple/jupyter_simple_ext1_config.py index f40b66afaf..6069883494 100644 --- a/examples/simple/jupyter_simple_ext1_config.py +++ b/examples/simple/jupyter_simple_ext1_config.py @@ -1,4 +1,5 @@ -c.SimpleApp1.configA = "ConfigA from file" -c.SimpleApp1.configB = "ConfigB from file" -c.SimpleApp1.configC = "ConfigC from file" -c.SimpleApp1.configD = "ConfigD from file" +"""Jupyter server config.""" +c.SimpleApp1.configA = "ConfigA from file" # type:ignore[name-defined] +c.SimpleApp1.configB = "ConfigB from file" # type:ignore[name-defined] +c.SimpleApp1.configC = "ConfigC from file" # type:ignore[name-defined] +c.SimpleApp1.configD = "ConfigD from file" # type:ignore[name-defined] diff --git a/examples/simple/jupyter_simple_ext2_config.py b/examples/simple/jupyter_simple_ext2_config.py index f145cbb87a..7b61087e1d 100644 --- a/examples/simple/jupyter_simple_ext2_config.py +++ b/examples/simple/jupyter_simple_ext2_config.py @@ -1 +1,2 @@ -c.SimpleApp2.configD = "ConfigD from file" +"""Jupyter server config.""" 
+c.SimpleApp2.configD = "ConfigD from file" # type:ignore[name-defined] diff --git a/examples/simple/package.json b/examples/simple/package.json index 37f76bad67..876b968a35 100644 --- a/examples/simple/package.json +++ b/examples/simple/package.json @@ -9,10 +9,8 @@ }, "dependencies": {}, "devDependencies": { - "rifraf": "2.0.3", - "webpack": "~4.29.6", - "webpack-cli": "^3.3.0", - "whatwg-fetch": "~2.0.3", - "typescript": "3.6.4" + "webpack": "^5.72.0", + "webpack-cli": "^5.0.0", + "typescript": "~4.7.3" } } diff --git a/examples/simple/pyproject.toml b/examples/simple/pyproject.toml index d4ff20627c..694b7a4f50 100644 --- a/examples/simple/pyproject.toml +++ b/examples/simple/pyproject.toml @@ -1,3 +1,38 @@ [build-system] -requires = ["jupyter_packaging~=0.5.0", "setuptools>=40.8.0", "wheel"] -build-backend = "setuptools.build_meta" +requires = ["hatchling","hatch-nodejs-version"] +build-backend = "hatchling.build" + +[project] +name = "jupyter-server-example" +description = "Jupyter Server Example" +readme = "README.md" +license = "" +requires-python = ">=3.8" +dependencies = [ + "jinja2", + "jupyter_server", +] +dynamic = ["version"] + +[project.optional-dependencies] +test = [ + "pytest", + "pytest-asyncio", +] + +[project.scripts] +jupyter-simple-ext1 = "simple_ext1.application:main" +jupyter-simple-ext11 = "simple_ext11.application:main" +jupyter-simple-ext2 = "simple_ext2.application:main" + +[tool.hatch.version] +source = "nodejs" + +[tool.hatch.build.targets.wheel.shared-data] +"etc/jupyter/jupyter_server_config.d" = "etc/jupyter/jupyter_server_config.d" + +[tool.hatch.build.hooks.jupyter-builder] +dependencies = [ + "hatch-jupyter-builder>=0.8.2", +] +build-function = "hatch_jupyter_builder.npm_builder" diff --git a/examples/simple/pytest.ini b/examples/simple/pytest.ini index 83dd817a2b..4f95513e55 100644 --- a/examples/simple/pytest.ini +++ b/examples/simple/pytest.ini @@ -1,3 +1,4 @@ [pytest] # Disable any upper exclusion. 
norecursedirs = +asyncio_mode = auto diff --git a/examples/simple/setup.py b/examples/simple/setup.py index 39391b1c85..b6c6681352 100644 --- a/examples/simple/setup.py +++ b/examples/simple/setup.py @@ -1,57 +1,2 @@ -import os - -from jupyter_packaging import create_cmdclass -from setuptools import setup - -VERSION = "0.0.1" - - -def get_data_files(): - """Get the data files for the package.""" - data_files = [ - ("etc/jupyter/jupyter_server_config.d", "etc/jupyter/jupyter_server_config.d/", "*.json"), - ] - - def add_data_files(path): - for (dirpath, dirnames, filenames) in os.walk(path): - if filenames: - paths = [(dirpath, dirpath, filename) for filename in filenames] - data_files.extend(paths) - - # Add all static and templates folders. - add_data_files("simple_ext1/static") - add_data_files("simple_ext1/templates") - add_data_files("simple_ext2/static") - add_data_files("simple_ext2/templates") - return data_files - - -cmdclass = create_cmdclass(data_files_spec=get_data_files()) - -setup_args = dict( - name="jupyter_server_example", - version=VERSION, - description="Jupyter Server Example", - long_description=open("README.md").read(), - python_requires=">=3.7", - install_requires=[ - "jupyter_server", - "jinja2", - ], - extras_require={ - "test": ["pytest"], - }, - include_package_data=True, - cmdclass=cmdclass, - entry_points={ - "console_scripts": [ - "jupyter-simple-ext1 = simple_ext1.application:main", - "jupyter-simple-ext11 = simple_ext11.application:main", - "jupyter-simple-ext2 = simple_ext2.application:main", - ] - }, -) - - -if __name__ == "__main__": - setup(**setup_args) +# setup.py shim for use with applications that require it. 
+__import__("setuptools").setup() diff --git a/examples/simple/simple_ext1/__main__.py b/examples/simple/simple_ext1/__main__.py index 317a0bd1f5..90b15cbc92 100644 --- a/examples/simple/simple_ext1/__main__.py +++ b/examples/simple/simple_ext1/__main__.py @@ -1,3 +1,4 @@ +"""Application cli main.""" from .application import main if __name__ == "__main__": diff --git a/examples/simple/simple_ext1/application.py b/examples/simple/simple_ext1/application.py index e7b05c3a6d..b77e57e4a8 100644 --- a/examples/simple/simple_ext1/application.py +++ b/examples/simple/simple_ext1/application.py @@ -1,3 +1,4 @@ +"""Jupyter server example application.""" import os from traitlets import Unicode @@ -18,6 +19,7 @@ class SimpleApp1(ExtensionAppJinjaMixin, ExtensionApp): + """A simple jupyter server application.""" # The name of the extension. name = "simple_ext1" @@ -29,10 +31,10 @@ class SimpleApp1(ExtensionAppJinjaMixin, ExtensionApp): load_other_extensions = True # Local path to static files directory. - static_paths = [DEFAULT_STATIC_FILES_PATH] + static_paths = [DEFAULT_STATIC_FILES_PATH] # type:ignore[assignment] # Local path to templates directory. 
- template_paths = [DEFAULT_TEMPLATE_FILES_PATH] + template_paths = [DEFAULT_TEMPLATE_FILES_PATH] # type:ignore[assignment] configA = Unicode("", config=True, help="Config A example.") @@ -41,6 +43,7 @@ class SimpleApp1(ExtensionAppJinjaMixin, ExtensionApp): configC = Unicode("", config=True, help="Config C example.") def initialize_handlers(self): + """Initialize handlers.""" self.handlers.extend( [ (rf"/{self.name}/default", DefaultHandler), @@ -53,6 +56,7 @@ def initialize_handlers(self): ) def initialize_settings(self): + """Initialize settings.""" self.log.info(f"Config {self.config}") diff --git a/examples/simple/simple_ext1/handlers.py b/examples/simple/simple_ext1/handlers.py index 82352eb0e8..9d25057bc3 100644 --- a/examples/simple/simple_ext1/handlers.py +++ b/examples/simple/simple_ext1/handlers.py @@ -1,13 +1,18 @@ +"""Jupyter server example handlers.""" +from jupyter_server.auth import authorized from jupyter_server.base.handlers import JupyterHandler -from jupyter_server.extension.handler import ( - ExtensionHandlerJinjaMixin, - ExtensionHandlerMixin, -) +from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin, ExtensionHandlerMixin from jupyter_server.utils import url_escape class DefaultHandler(ExtensionHandlerMixin, JupyterHandler): + """Default API handler.""" + + auth_resource = "simple_ext1:default" + + @authorized def get(self): + """Get the extension response.""" # The name of the extension to which this handler is linked. self.log.info(f"Extension Name in {self.name} Default Handler: {self.name}") # A method for getting the url to static files (prefixed with /static/). 
@@ -19,13 +24,19 @@ def get(self): class RedirectHandler(ExtensionHandlerMixin, JupyterHandler): + """A redirect handler.""" + def get(self): + """Handle a redirect.""" self.redirect(f"/static/{self.name}/favicon.ico") class ParameterHandler(ExtensionHandlerMixin, JupyterHandler): + """A parameterized handler.""" + def get(self, matched_part=None, *args, **kwargs): - var1 = self.get_argument("var1", default=None) + """Handle a get with parameters.""" + var1 = self.get_argument("var1", default="") components = [x for x in self.request.path.split("/") if x] self.write("
<h1>Hello Simple App 1 from Handler.</h1>
") self.write(f"
<p>matched_part: {url_escape(matched_part)}</p>
") @@ -34,21 +45,28 @@ def get(self, matched_part=None, *args, **kwargs): class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): - pass + """The base template handler.""" class TypescriptHandler(BaseTemplateHandler): + """A typescript handler.""" + def get(self): + """Get the typescript template.""" self.write(self.render_template("typescript.html")) class TemplateHandler(BaseTemplateHandler): + """A template handler.""" + def get(self, path): """Optionally, you can print(self.get_template('simple1.html'))""" self.write(self.render_template("simple1.html", path=path)) class ErrorHandler(BaseTemplateHandler): + """An error handler.""" + def get(self, path): - # write_error renders template from error.html file. + """Write_error renders template from error.html file.""" self.write_error(400) diff --git a/examples/simple/simple_ext1/static/bundle.js b/examples/simple/simple_ext1/static/bundle.js deleted file mode 100644 index 9017d3883d..0000000000 --- a/examples/simple/simple_ext1/static/bundle.js +++ /dev/null @@ -1,144 +0,0 @@ -/******/ (function (modules) { - // webpackBootstrap - /******/ // The module cache - /******/ var installedModules = {}; // The require function - /******/ - /******/ /******/ function __webpack_require__(moduleId) { - /******/ - /******/ // Check if module is in cache - /******/ if (installedModules[moduleId]) { - /******/ return installedModules[moduleId].exports; - /******/ - } // Create a new module (and put it into the cache) - /******/ /******/ var module = (installedModules[moduleId] = { - /******/ i: moduleId, - /******/ l: false, - /******/ exports: {}, - /******/ - }); // Execute the module function - /******/ - /******/ /******/ modules[moduleId].call( - module.exports, - module, - module.exports, - __webpack_require__ - ); // Flag the module as loaded - /******/ - /******/ /******/ module.l = true; // Return the exports of the module - /******/ - /******/ /******/ return module.exports; - 
/******/ - } // expose the modules object (__webpack_modules__) - /******/ - /******/ - /******/ /******/ __webpack_require__.m = modules; // expose the module cache - /******/ - /******/ /******/ __webpack_require__.c = installedModules; // define getter function for harmony exports - /******/ - /******/ /******/ __webpack_require__.d = function (exports, name, getter) { - /******/ if (!__webpack_require__.o(exports, name)) { - /******/ Object.defineProperty(exports, name, { - enumerable: true, - get: getter, - }); - /******/ - } - /******/ - }; // define __esModule on exports - /******/ - /******/ /******/ __webpack_require__.r = function (exports) { - /******/ if (typeof Symbol !== "undefined" && Symbol.toStringTag) { - /******/ Object.defineProperty(exports, Symbol.toStringTag, { - value: "Module", - }); - /******/ - } - /******/ Object.defineProperty(exports, "__esModule", { value: true }); - /******/ - }; // create a fake namespace object // mode & 1: value is a module id, require it // mode & 2: merge all properties of value into the ns // mode & 4: return value when already ns object // mode & 8|1: behave like require - /******/ - /******/ /******/ /******/ /******/ /******/ /******/ __webpack_require__.t = - function (value, mode) { - /******/ if (mode & 1) value = __webpack_require__(value); - /******/ if (mode & 8) return value; - /******/ if ( - mode & 4 && - typeof value === "object" && - value && - value.__esModule - ) - return value; - /******/ var ns = Object.create(null); - /******/ __webpack_require__.r(ns); - /******/ Object.defineProperty(ns, "default", { - enumerable: true, - value: value, - }); - /******/ if (mode & 2 && typeof value != "string") - for (var key in value) - __webpack_require__.d( - ns, - key, - function (key) { - return value[key]; - }.bind(null, key) - ); - /******/ return ns; - /******/ - }; // getDefaultExport function for compatibility with non-harmony modules - /******/ - /******/ /******/ __webpack_require__.n = function 
(module) { - /******/ var getter = - module && module.__esModule - ? /******/ function getDefault() { - return module["default"]; - } - : /******/ function getModuleExports() { - return module; - }; - /******/ __webpack_require__.d(getter, "a", getter); - /******/ return getter; - /******/ - }; // Object.prototype.hasOwnProperty.call - /******/ - /******/ /******/ __webpack_require__.o = function (object, property) { - return Object.prototype.hasOwnProperty.call(object, property); - }; // __webpack_public_path__ - /******/ - /******/ /******/ __webpack_require__.p = ""; // Load entry module and return exports - /******/ - /******/ - /******/ /******/ return __webpack_require__((__webpack_require__.s = 0)); - /******/ -})( - /************************************************************************/ - /******/ { - /***/ "./simple_ext1/static/index.js": - /*!*************************************!*\ - !*** ./simple_ext1/static/index.js ***! - \*************************************/ - /*! no static exports found */ - /***/ function (module, exports) { - eval( - 'function main() {\n let div = document.getElementById("mydiv");\n div.innerText = "Hello from Typescript";\n}\nwindow.addEventListener(\'load\', main);\n\n\n//# sourceURL=webpack:///./simple_ext1/static/index.js?' - ); - - /***/ - }, - - /***/ 0: - /*!*******************************************!*\ - !*** multi ./simple_ext1/static/index.js ***! - \*******************************************/ - /*! no static exports found */ - /***/ function (module, exports, __webpack_require__) { - eval( - 'module.exports = __webpack_require__(/*! ./simple_ext1/static/index.js */"./simple_ext1/static/index.js");\n\n\n//# sourceURL=webpack:///multi_./simple_ext1/static/index.js?' 
- ); - - /***/ - }, - - /******/ - } -); diff --git a/examples/simple/simple_ext1/static/index.js b/examples/simple/simple_ext1/static/index.js index 4cc84b9bc3..a6c59f1086 100644 --- a/examples/simple/simple_ext1/static/index.js +++ b/examples/simple/simple_ext1/static/index.js @@ -1,5 +1,5 @@ function main() { - let div = document.getElementById("mydiv"); - div.innerText = "Hello from Typescript"; + let div = document.getElementById("mydiv"); + div.innerText = "Hello from Typescript"; } window.addEventListener("load", main); diff --git a/examples/simple/simple_ext1/static/tsconfig.tsbuildinfo b/examples/simple/simple_ext1/static/tsconfig.tsbuildinfo index 8167ef00a2..27452c1246 100644 --- a/examples/simple/simple_ext1/static/tsconfig.tsbuildinfo +++ b/examples/simple/simple_ext1/static/tsconfig.tsbuildinfo @@ -50,7 +50,7 @@ "signature": "3e0a459888f32b42138d5a39f706ff2d55d500ab1031e0988b5568b0f67c2303" }, "../../src/index.ts": { - "version": "fd4f62325debd29128c1990caa4d546f2c48c21ea133fbcbb3e29f9fbef55e49", + "version": "a5398b1577287a9a5a7e190a9a7283ee67b12fcc0dbc6d2cac55ef25ed166bb2", "signature": "ed4b087ea2a2e4a58647864cf512c7534210bfc2f9d236a2f9ed5245cf7a0896" } }, diff --git a/examples/simple/simple_ext11/__init__.py b/examples/simple/simple_ext11/__init__.py index abe0f73a2a..d7c3e4341b 100644 --- a/examples/simple/simple_ext11/__init__.py +++ b/examples/simple/simple_ext11/__init__.py @@ -1,3 +1,4 @@ +"""Extension entry point.""" from .application import SimpleApp11 diff --git a/examples/simple/simple_ext11/__main__.py b/examples/simple/simple_ext11/__main__.py index 317a0bd1f5..90b15cbc92 100644 --- a/examples/simple/simple_ext11/__main__.py +++ b/examples/simple/simple_ext11/__main__.py @@ -1,3 +1,4 @@ +"""Application cli main.""" from .application import main if __name__ == "__main__": diff --git a/examples/simple/simple_ext11/application.py b/examples/simple/simple_ext11/application.py index fb4e6f846f..398716f213 100644 --- 
a/examples/simple/simple_ext11/application.py +++ b/examples/simple/simple_ext11/application.py @@ -1,6 +1,7 @@ +"""A Jupyter Server example application.""" import os -from simple_ext1.application import SimpleApp1 +from simple_ext1.application import SimpleApp1 # type:ignore[import-not-found] from traitlets import Bool, Unicode, observe from jupyter_server.serverapp import aliases, flags @@ -10,6 +11,8 @@ class SimpleApp11(SimpleApp1): + """A simple application.""" + flags["hello"] = ({"SimpleApp11": {"hello": True}}, "Say hello on startup.") aliases.update( { @@ -20,7 +23,7 @@ class SimpleApp11(SimpleApp1): # The name of the extension. name = "simple_ext11" - # Te url that your extension will serve its homepage. + # The url that your extension will serve its homepage. extension_url = "/simple_ext11/default" # Local path to static files directory. @@ -53,6 +56,7 @@ def simple11_dir_formatted(self): return "/" + self.simple11_dir def initialize_settings(self): + """Initialize settings.""" self.log.info(f"hello: {self.hello}") if self.hello is True: self.log.info( @@ -62,6 +66,7 @@ def initialize_settings(self): super().initialize_settings() def initialize_handlers(self): + """Initialize handlers.""" super().initialize_handlers() diff --git a/examples/simple/simple_ext2/__init__.py b/examples/simple/simple_ext2/__init__.py index ffe7bc43c3..3059dbda49 100644 --- a/examples/simple/simple_ext2/__init__.py +++ b/examples/simple/simple_ext2/__init__.py @@ -1,3 +1,4 @@ +"""The extension entry point.""" from .application import SimpleApp2 diff --git a/examples/simple/simple_ext2/__main__.py b/examples/simple/simple_ext2/__main__.py index 317a0bd1f5..465db9c1c2 100644 --- a/examples/simple/simple_ext2/__main__.py +++ b/examples/simple/simple_ext2/__main__.py @@ -1,3 +1,4 @@ +"""The application cli main.""" from .application import main if __name__ == "__main__": diff --git a/examples/simple/simple_ext2/application.py b/examples/simple/simple_ext2/application.py index 
5ca3fac882..b9da358131 100644 --- a/examples/simple/simple_ext2/application.py +++ b/examples/simple/simple_ext2/application.py @@ -1,3 +1,4 @@ +"""A simple Jupyter Server extension example.""" import os from traitlets import Unicode @@ -11,25 +12,27 @@ class SimpleApp2(ExtensionAppJinjaMixin, ExtensionApp): + """A simple application.""" # The name of the extension. name = "simple_ext2" - # Te url that your extension will serve its homepage. + # The url that your extension will serve its homepage. extension_url = "/simple_ext2" # Should your extension expose other server extensions when launched directly? load_other_extensions = True # Local path to static files directory. - static_paths = [DEFAULT_STATIC_FILES_PATH] + static_paths = [DEFAULT_STATIC_FILES_PATH] # type:ignore[assignment] # Local path to templates directory. - template_paths = [DEFAULT_TEMPLATE_FILES_PATH] + template_paths = [DEFAULT_TEMPLATE_FILES_PATH] # type:ignore[assignment] configD = Unicode("", config=True, help="Config D example.") def initialize_handlers(self): + """Initialize handlers.""" self.handlers.extend( [ (r"/simple_ext2/params/(.+)$", ParameterHandler), @@ -40,6 +43,7 @@ def initialize_handlers(self): ) def initialize_settings(self): + """Initialize settings.""" self.log.info(f"Config {self.config}") diff --git a/examples/simple/simple_ext2/handlers.py b/examples/simple/simple_ext2/handlers.py index acd908cfb5..4f52e6f061 100644 --- a/examples/simple/simple_ext2/handlers.py +++ b/examples/simple/simple_ext2/handlers.py @@ -1,14 +1,15 @@ +"""API handlers for the Jupyter Server example.""" from jupyter_server.base.handlers import JupyterHandler -from jupyter_server.extension.handler import ( - ExtensionHandlerJinjaMixin, - ExtensionHandlerMixin, -) +from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin, ExtensionHandlerMixin from jupyter_server.utils import url_escape class ParameterHandler(ExtensionHandlerMixin, JupyterHandler): + """A parameterized handler.""" + 
def get(self, matched_part=None, *args, **kwargs): - var1 = self.get_argument("var1", default=None) + """Get a parameterized response.""" + var1 = self.get_argument("var1", default="") components = [x for x in self.request.path.split("/") if x] self.write("

Hello Simple App 2 from Handler.

") self.write(f"

matched_part: {url_escape(matched_part)}

") @@ -17,20 +18,28 @@ def get(self, matched_part=None, *args, **kwargs): class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): - pass + """A base template handler.""" class IndexHandler(BaseTemplateHandler): + """The root API handler.""" + def get(self): + """Get the root response.""" self.write(self.render_template("index.html")) class TemplateHandler(BaseTemplateHandler): + """A template handler.""" + def get(self, path): - print(self.get_template("simple_ext2.html")) + """Get the template for the path.""" self.write(self.render_template("simple_ext2.html", path=path)) class ErrorHandler(BaseTemplateHandler): + """An error handler.""" + def get(self, path): + """Handle the error.""" self.write_error(400) diff --git a/examples/simple/tests/test_handlers.py b/examples/simple/tests/test_handlers.py index a46bb2b868..59b9d045ae 100644 --- a/examples/simple/tests/test_handlers.py +++ b/examples/simple/tests/test_handlers.py @@ -1,20 +1,43 @@ +"""Tests for the simple handler.""" import pytest -@pytest.fixture -def jp_server_config(jp_template_dir): +@pytest.fixture() +def jp_server_auth_resources(jp_server_auth_core_resources): + """The server auth resources.""" + for url_regex in [ + "/simple_ext1/default", + ]: + jp_server_auth_core_resources[url_regex] = "simple_ext1:default" + return jp_server_auth_core_resources + + +@pytest.fixture() +def jp_server_config(jp_template_dir, jp_server_authorizer): + """The server config.""" return { - "ServerApp": {"jpserver_extensions": {"simple_ext1": True}}, + "ServerApp": { + "jpserver_extensions": {"simple_ext1": True}, + "authorizer_class": jp_server_authorizer, + }, } -async def test_handler_default(jp_fetch): +async def test_handler_default(jp_fetch, jp_serverapp): + """Test the default handler.""" + jp_serverapp.authorizer.permissions = { + "actions": ["read"], + "resources": [ + "simple_ext1:default", + ], + } r = await jp_fetch("simple_ext1/default", method="GET") assert r.code == 
200 assert r.body.decode().index("Hello Simple 1 - I am the default...") > -1 async def test_handler_template(jp_fetch): + """Test the template handler.""" path = "/custom/path" r = await jp_fetch(f"simple_ext1/template1/{path}", method="GET") assert r.code == 200 @@ -22,10 +45,12 @@ async def test_handler_template(jp_fetch): async def test_handler_typescript(jp_fetch): + """Test the typescript handler.""" r = await jp_fetch("simple_ext1/typescript", method="GET") assert r.code == 200 async def test_handler_error(jp_fetch): + """Test the error handler.""" r = await jp_fetch("simple_ext1/nope", method="GET") assert r.body.decode().index("400 : Bad Request") > -1 diff --git a/examples/simple/webpack.config.js b/examples/simple/webpack.config.js index c0f4735649..5acc57fa89 100644 --- a/examples/simple/webpack.config.js +++ b/examples/simple/webpack.config.js @@ -3,6 +3,7 @@ module.exports = { output: { path: require("path").join(__dirname, "simple_ext1", "static"), filename: "bundle.js", + hashFunction: 'sha256' }, mode: "development", }; diff --git a/jupyter_server/__init__.py b/jupyter_server/__init__.py index d5b97f0c90..3d85bbd2c8 100644 --- a/jupyter_server/__init__.py +++ b/jupyter_server/__init__.py @@ -1,7 +1,6 @@ """The Jupyter Server""" import os -import subprocess -import sys +import pathlib DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static") DEFAULT_TEMPLATE_PATH_LIST = [ @@ -10,20 +9,19 @@ ] DEFAULT_JUPYTER_SERVER_PORT = 8888 - -del os - -from ._version import __version__, version_info # noqa - - -def _cleanup(): - pass - - -# patch subprocess on Windows for python<3.7 -# see https://bugs.python.org/issue37380 -# the fix for python3.7: https://github.com/python/cpython/pull/15706/files -if sys.platform == "win32": - if sys.version_info < (3, 7): - subprocess._cleanup = _cleanup - subprocess._active = None +JUPYTER_SERVER_EVENTS_URI = "https://events.jupyter.org/jupyter_server" +DEFAULT_EVENTS_SCHEMA_PATH = 
pathlib.Path(__file__).parent / "event_schemas" + +from ._version import __version__, version_info +from .base.call_context import CallContext + +__all__ = [ + "DEFAULT_STATIC_FILES_PATH", + "DEFAULT_TEMPLATE_PATH_LIST", + "DEFAULT_JUPYTER_SERVER_PORT", + "JUPYTER_SERVER_EVENTS_URI", + "DEFAULT_EVENTS_SCHEMA_PATH", + "__version__", + "version_info", + "CallContext", +] diff --git a/jupyter_server/__main__.py b/jupyter_server/__main__.py index 6ada4be7ea..70a8ef34bd 100644 --- a/jupyter_server/__main__.py +++ b/jupyter_server/__main__.py @@ -1,3 +1,5 @@ +"""The main entry point for Jupyter Server.""" + if __name__ == "__main__": from jupyter_server import serverapp as app diff --git a/jupyter_server/_sysinfo.py b/jupyter_server/_sysinfo.py index a0a430bb2c..f167c4e92a 100644 --- a/jupyter_server/_sysinfo.py +++ b/jupyter_server/_sysinfo.py @@ -73,17 +73,17 @@ def pkg_info(pkg_path): with named parameters of interest """ src, hsh = pkg_commit_hash(pkg_path) - return dict( - jupyter_server_version=jupyter_server.__version__, - jupyter_server_path=pkg_path, - commit_source=src, - commit_hash=hsh, - sys_version=sys.version, - sys_executable=sys.executable, - sys_platform=sys.platform, - platform=platform.platform(), - os_name=os.name, - ) + return { + "jupyter_server_version": jupyter_server.__version__, + "jupyter_server_path": pkg_path, + "commit_source": src, + "commit_hash": hsh, + "sys_version": sys.version, + "sys_executable": sys.executable, + "sys_platform": sys.platform, + "platform": platform.platform(), + "os_name": os.name, + } def get_sys_info(): diff --git a/jupyter_server/_tz.py b/jupyter_server/_tz.py index 41d8222889..a7a495de85 100644 --- a/jupyter_server/_tz.py +++ b/jupyter_server/_tz.py @@ -5,7 +5,9 @@ """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
-from datetime import datetime, timedelta, tzinfo +from __future__ import annotations + +from datetime import datetime, timedelta, timezone, tzinfo # constant for zero offset ZERO = timedelta(0) @@ -14,31 +16,28 @@ class tzUTC(tzinfo): """tzinfo object for UTC (zero offset)""" - def utcoffset(self, d): + def utcoffset(self, d: datetime | None) -> timedelta: + """Compute utcoffset.""" return ZERO - def dst(self, d): + def dst(self, d: datetime | None) -> timedelta: + """Compute dst.""" return ZERO -UTC = tzUTC() - - -def utc_aware(unaware): - """decorator for adding UTC tzinfo to datetime's utcfoo methods""" +def utcnow() -> datetime: + """Return timezone-aware UTC timestamp""" + return datetime.now(timezone.utc) - def utc_method(*args, **kwargs): - dt = unaware(*args, **kwargs) - return dt.replace(tzinfo=UTC) - return utc_method +def utcfromtimestamp(timestamp: float) -> datetime: + return datetime.fromtimestamp(timestamp, timezone.utc) -utcfromtimestamp = utc_aware(datetime.utcfromtimestamp) -utcnow = utc_aware(datetime.utcnow) +UTC = tzUTC() # type:ignore[abstract] -def isoformat(dt): +def isoformat(dt: datetime) -> str: """Return iso-formatted timestamp Like .isoformat(), but uses Z for UTC instead of +00:00 diff --git a/jupyter_server/_version.py b/jupyter_server/_version.py index 77a1118f9b..bef2f8e281 100644 --- a/jupyter_server/_version.py +++ b/jupyter_server/_version.py @@ -2,5 +2,17 @@ store the current version info of the server. 
""" -version_info = (1, 16, 1, ".dev", "0") -__version__ = ".".join(map(str, version_info[:3])) + "".join(version_info[3:]) +import re +from typing import List + +# Version string must appear intact for automatic versioning +__version__ = "2.11.2" + +# Build up version_info tuple for backwards compatibility +pattern = r"(?P\d+).(?P\d+).(?P\d+)(?P.*)" +match = re.match(pattern, __version__) +assert match is not None +parts: List[object] = [int(match[part]) for part in ["major", "minor", "patch"]] +if match["rest"]: + parts.append(match["rest"]) +version_info = tuple(parts) diff --git a/jupyter_server/auth/__init__.py b/jupyter_server/auth/__init__.py index 54477ffd1b..36418f214b 100644 --- a/jupyter_server/auth/__init__.py +++ b/jupyter_server/auth/__init__.py @@ -1,3 +1,4 @@ -from .authorizer import * # noqa -from .decorator import authorized # noqa -from .security import passwd # noqa +from .authorizer import * +from .decorator import authorized +from .identity import * +from .security import passwd diff --git a/jupyter_server/auth/__main__.py b/jupyter_server/auth/__main__.py index b34a3189c1..d1573f11a1 100644 --- a/jupyter_server/auth/__main__.py +++ b/jupyter_server/auth/__main__.py @@ -1,22 +1,27 @@ +"""The cli for auth.""" import argparse import sys +import warnings from getpass import getpass from jupyter_core.paths import jupyter_config_dir +from traitlets.log import get_logger -from jupyter_server.auth import passwd +from jupyter_server.auth import passwd # type:ignore[attr-defined] from jupyter_server.config_manager import BaseJSONConfigManager def set_password(args): + """Set a password.""" password = args.password + while not password: password1 = getpass("" if args.quiet else "Provide password: ") password_repeat = getpass("" if args.quiet else "Repeat password: ") if password1 != password_repeat: - print("Passwords do not match, try again") + warnings.warn("Passwords do not match, try again", stacklevel=2) elif len(password1) < 4: - print("Please 
provide at least 4 characters") + warnings.warn("Please provide at least 4 characters", stacklevel=2) else: password = password1 @@ -31,10 +36,12 @@ def set_password(args): }, ) if not args.quiet: - print("password stored in config dir: %s" % jupyter_config_dir()) + log = get_logger() + log.info("password stored in config dir: %s" % jupyter_config_dir()) def main(argv): + """The main cli handler.""" parser = argparse.ArgumentParser(argv[0]) subparsers = parser.add_subparsers() parser_password = subparsers.add_parser( diff --git a/jupyter_server/auth/authorizer.py b/jupyter_server/auth/authorizer.py index 952cb0278d..f22dbe5463 100644 --- a/jupyter_server/auth/authorizer.py +++ b/jupyter_server/auth/authorizer.py @@ -7,9 +7,17 @@ """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from __future__ import annotations + +from typing import TYPE_CHECKING + +from traitlets import Instance from traitlets.config import LoggingConfigurable -from jupyter_server.base.handlers import JupyterHandler +from .identity import IdentityProvider, User + +if TYPE_CHECKING: + from jupyter_server.base.handlers import JupyterHandler class Authorizer(LoggingConfigurable): @@ -18,10 +26,10 @@ class Authorizer(LoggingConfigurable): All authorizers used in Jupyter Server should inherit from this base class and, at the very minimum, - implement an `is_authorized` method with the + implement an ``is_authorized`` method with the same signature as in this base class. - The `is_authorized` method is called by the `@authorized` decorator + The ``is_authorized`` method is called by the ``@authorized`` decorator in JupyterHandler. If it returns True, the incoming request to the server is accepted; if it returns False, the server returns a 403 (Forbidden) error code. @@ -32,23 +40,30 @@ class Authorizer(LoggingConfigurable): .. 
versionadded:: 2.0 """ - def is_authorized(self, handler: JupyterHandler, user: str, action: str, resource: str) -> bool: - """A method to determine if `user` is authorized to perform `action` - (read, write, or execute) on the `resource` type. + identity_provider = Instance(IdentityProvider) + + def is_authorized( + self, handler: JupyterHandler, user: User, action: str, resource: str + ) -> bool: + """A method to determine if ``user`` is authorized to perform ``action`` + (read, write, or execute) on the ``resource`` type. Parameters ---------- - user : usually a dict or string - A truthy model representing the authenticated user. - A username string by default, - but usually a dict when integrating with an auth provider. + user : jupyter_server.auth.User + An object representing the authenticated user, + as returned by :meth:`jupyter_server.auth.IdentityProvider.get_user`. + action : str the category of action for the current request: read, write, or execute. resource : str the type of resource (i.e. contents, kernels, files, etc.) the user is requesting. - Returns True if user authorized to make request; otherwise, returns False. + Returns + ------- + bool + True if user authorized to make request; False, otherwise """ raise NotImplementedError() @@ -61,7 +76,9 @@ class AllowAllAuthorizer(Authorizer): .. versionadded:: 2.0 """ - def is_authorized(self, handler: JupyterHandler, user: str, action: str, resource: str) -> bool: + def is_authorized( + self, handler: JupyterHandler, user: User, action: str, resource: str + ) -> bool: """This method always returns True. All authenticated users are allowed to do anything in the Jupyter Server. diff --git a/jupyter_server/auth/decorator.py b/jupyter_server/auth/decorator.py index 72a489dbe9..fd38cda1e7 100644 --- a/jupyter_server/auth/decorator.py +++ b/jupyter_server/auth/decorator.py @@ -3,19 +3,21 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
from functools import wraps -from typing import Callable, Optional, Union +from typing import Any, Callable, Optional, TypeVar, Union, cast from tornado.log import app_log from tornado.web import HTTPError -from .utils import HTTP_METHOD_TO_AUTH_ACTION, warn_disabled_authorization +from .utils import HTTP_METHOD_TO_AUTH_ACTION + +FuncT = TypeVar("FuncT", bound=Callable[..., Any]) def authorized( - action: Optional[Union[str, Callable]] = None, + action: Optional[Union[str, FuncT]] = None, resource: Optional[str] = None, message: Optional[str] = None, -) -> Callable: +) -> FuncT: """A decorator for tornado.web.RequestHandler methods that verifies whether the current user is authorized to make the following request. @@ -57,18 +59,13 @@ def inner(self, *args, **kwargs): if not user: app_log.warning("Attempting to authorize request without authentication!") raise HTTPError(status_code=403, log_message=message) - - # Handle the case where an authorizer wasn't attached to the handler. - if not self.authorizer: - warn_disabled_authorization() - return method(self, *args, **kwargs) - - # Only return the method if the action is authorized. + # If the user is allowed to do this action, + # call the method. if self.authorizer.is_authorized(self, user, action, resource): return method(self, *args, **kwargs) - - # Raise an exception if the method wasn't returned (i.e. not authorized) - raise HTTPError(status_code=403, log_message=message) + # else raise an exception. 
+ else: + raise HTTPError(status_code=403, log_message=message) return inner @@ -76,6 +73,6 @@ def inner(self, *args, **kwargs): method = action action = None # no-arguments `@authorized` decorator called - return wrapper(method) + return cast(FuncT, wrapper(method)) - return wrapper + return cast(FuncT, wrapper) diff --git a/jupyter_server/auth/identity.py b/jupyter_server/auth/identity.py new file mode 100644 index 0000000000..adeb567b5b --- /dev/null +++ b/jupyter_server/auth/identity.py @@ -0,0 +1,738 @@ +"""Identity Provider interface + +This defines the _authentication_ layer of Jupyter Server, +to be used in combination with Authorizer for _authorization_. + +.. versionadded:: 2.0 +""" +from __future__ import annotations + +import binascii +import datetime +import json +import os +import re +import sys +import typing as t +import uuid +from dataclasses import asdict, dataclass +from http.cookies import Morsel + +from tornado import escape, httputil, web +from traitlets import Bool, Dict, Type, Unicode, default +from traitlets.config import LoggingConfigurable + +from jupyter_server.transutils import _i18n + +from .security import passwd_check, set_password +from .utils import get_anonymous_username + +_non_alphanum = re.compile(r"[^A-Za-z0-9]") + + +@dataclass +class User: + """Object representing a User + + This or a subclass should be returned from IdentityProvider.get_user + """ + + username: str # the only truly required field + + # these fields are filled from username if not specified + # name is the 'real' name of the user + name: str = "" + # display_name is a shorter name for us in UI, + # if different from name. e.g. a nickname + display_name: str = "" + + # these fields are left as None if undefined + initials: str | None = None + avatar_url: str | None = None + color: str | None = None + + # TODO: extension fields? 
+ # ext: Dict[str, Dict[str, Any]] = field(default_factory=dict) + + def __post_init__(self): + self.fill_defaults() + + def fill_defaults(self): + """Fill out default fields in the identity model + + - Ensures all values are defined + - Fills out derivative values for name fields fields + - Fills out null values for optional fields + """ + + # username is the only truly required field + if not self.username: + msg = f"user.username must not be empty: {self}" + raise ValueError(msg) + + # derive name fields from username -> name -> display name + if not self.name: + self.name = self.username + if not self.display_name: + self.display_name = self.name + + +def _backward_compat_user(got_user: t.Any) -> User: + """Backward-compatibility for LoginHandler.get_user + + Prior to 2.0, LoginHandler.get_user could return anything truthy. + + Typically, this was either a simple string username, + or a simple dict. + + Make some effort to allow common patterns to keep working. + """ + if isinstance(got_user, str): + return User(username=got_user) + elif isinstance(got_user, dict): + kwargs = {} + if "username" not in got_user and "name" in got_user: + kwargs["username"] = got_user["name"] + for field in User.__dataclass_fields__: + if field in got_user: + kwargs[field] = got_user[field] + try: + return User(**kwargs) + except TypeError: + msg = f"Unrecognized user: {got_user}" + raise ValueError(msg) from None + else: + msg = f"Unrecognized user: {got_user}" + raise ValueError(msg) + + +class IdentityProvider(LoggingConfigurable): + """ + Interface for providing identity management and authentication. + + Two principle methods: + + - :meth:`~jupyter_server.auth.IdentityProvider.get_user` returns a :class:`~.User` object + for successful authentication, or None for no-identity-found. + - :meth:`~jupyter_server.auth.IdentityProvider.identity_model` turns a :class:`~jupyter_server.auth.User` into a JSONable dict. 
+ The default is to use :py:meth:`dataclasses.asdict`, + and usually shouldn't need override. + + Additional methods can customize authentication. + + .. versionadded:: 2.0 + """ + + cookie_name: str | Unicode[str, str | bytes] = Unicode( + "", + config=True, + help=_i18n("Name of the cookie to set for persisting login. Default: username-${Host}."), + ) + + cookie_options = Dict( + config=True, + help=_i18n( + "Extra keyword arguments to pass to `set_secure_cookie`." + " See tornado's set_secure_cookie docs for details." + ), + ) + + secure_cookie: bool | Bool[bool | None, bool | int | None] = Bool( + None, + allow_none=True, + config=True, + help=_i18n( + "Specify whether login cookie should have the `secure` property (HTTPS-only)." + "Only needed when protocol-detection gives the wrong answer due to proxies." + ), + ) + + get_secure_cookie_kwargs = Dict( + config=True, + help=_i18n( + "Extra keyword arguments to pass to `get_secure_cookie`." + " See tornado's get_secure_cookie docs for details." + ), + ) + + token: str | Unicode[str, str | bytes] = Unicode( + "", + help=_i18n( + """Token used for authenticating first-time connections to the server. + + The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set directly + with the JUPYTER_TOKEN environment variable. + + When no password is enabled, + the default is to generate a new, random token. + + Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED. 
+ + Prior to 2.0: configured as ServerApp.token + """ + ), + ).tag(config=True) + + login_handler_class = Type( + default_value="jupyter_server.auth.login.LoginFormHandler", + klass=web.RequestHandler, + config=True, + help=_i18n("The login handler class to use, if any."), + ) + + logout_handler_class = Type( + default_value="jupyter_server.auth.logout.LogoutHandler", + klass=web.RequestHandler, + config=True, + help=_i18n("The logout handler class to use."), + ) + + token_generated = False + + @default("token") + def _token_default(self): + if os.getenv("JUPYTER_TOKEN"): + self.token_generated = False + return os.environ["JUPYTER_TOKEN"] + if os.getenv("JUPYTER_TOKEN_FILE"): + self.token_generated = False + with open(os.environ["JUPYTER_TOKEN_FILE"]) as token_file: + return token_file.read() + if not self.need_token: + # no token if password is enabled + self.token_generated = False + return "" + else: + self.token_generated = True + return binascii.hexlify(os.urandom(24)).decode("ascii") + + need_token: bool | Bool[bool, t.Union[bool, int]] = Bool(True) + + def get_user(self, handler: web.RequestHandler) -> User | None | t.Awaitable[User | None]: + """Get the authenticated user for a request + + Must return a :class:`jupyter_server.auth.User`, + though it may be a subclass. + + Return None if the request is not authenticated. 
+ + _may_ be a coroutine + """ + return self._get_user(handler) + + # not sure how to have optional-async type signature + # on base class with `async def` without splitting it into two methods + + async def _get_user(self, handler: web.RequestHandler) -> User | None: + """Get the user.""" + if getattr(handler, "_jupyter_current_user", None): + # already authenticated + return t.cast(User, handler._jupyter_current_user) # type:ignore[attr-defined] + _token_user: User | None | t.Awaitable[User | None] = self.get_user_token(handler) + if isinstance(_token_user, t.Awaitable): + _token_user = await _token_user + token_user: User | None = _token_user # need second variable name to collapse type + _cookie_user = self.get_user_cookie(handler) + if isinstance(_cookie_user, t.Awaitable): + _cookie_user = await _cookie_user + cookie_user: User | None = _cookie_user + # prefer token to cookie if both given, + # because token is always explicit + user = token_user or cookie_user + + if user is not None and token_user is not None: + # if token-authenticated, persist user_id in cookie + # if it hasn't already been stored there + if user != cookie_user: + self.set_login_cookie(handler, user) + # Record that the current request has been authenticated with a token. + # Used in is_token_authenticated above. + handler._token_authenticated = True # type:ignore[attr-defined] + + if user is None: + # If an invalid cookie was sent, clear it to prevent unnecessary + # extra warnings. But don't do this on a request with *no* cookie, + # because that can erroneously log you out (see gh-3365) + cookie_name = self.get_cookie_name(handler) + cookie = handler.get_cookie(cookie_name) + if cookie is not None: + self.log.warning(f"Clearing invalid/expired login cookie {cookie_name}") + self.clear_login_cookie(handler) + if not self.auth_enabled: + # Completely insecure! No authentication at all. + # No need to warn here, though; validate_security will have already done that. 
+ user = self.generate_anonymous_user(handler) + # persist user on first request + # so the user data is stable for a given browser session + self.set_login_cookie(handler, user) + + return user + + def identity_model(self, user: User) -> dict[str, t.Any]: + """Return a User as an Identity model""" + # TODO: validate? + return asdict(user) + + def get_handlers(self) -> list[tuple[str, object]]: + """Return list of additional handlers for this identity provider + + For example, an OAuth callback handler. + """ + handlers = [] + if self.login_available: + handlers.append((r"/login", self.login_handler_class)) + if self.logout_available: + handlers.append((r"/logout", self.logout_handler_class)) + return handlers + + def user_to_cookie(self, user: User) -> str: + """Serialize a user to a string for storage in a cookie + + If overriding in a subclass, make sure to define user_from_cookie as well. + + Default is just the user's username. + """ + # default: username is enough + cookie = json.dumps( + { + "username": user.username, + "name": user.name, + "display_name": user.display_name, + "initials": user.initials, + "color": user.color, + } + ) + return cookie + + def user_from_cookie(self, cookie_value: str) -> User | None: + """Inverse of user_to_cookie""" + user = json.loads(cookie_value) + return User( + user["username"], + user["name"], + user["display_name"], + user["initials"], + None, + user["color"], + ) + + def get_cookie_name(self, handler: web.RequestHandler) -> str: + """Return the login cookie name + + Uses IdentityProvider.cookie_name, if defined. + Default is to generate a string taking host into account to avoid + collisions for multiple servers on one hostname with different ports. 
+ """ + if self.cookie_name: + return self.cookie_name + else: + return _non_alphanum.sub("-", f"username-{handler.request.host}") + + def set_login_cookie(self, handler: web.RequestHandler, user: User) -> None: + """Call this on handlers to set the login cookie for success""" + cookie_options = {} + cookie_options.update(self.cookie_options) + cookie_options.setdefault("httponly", True) + # tornado <4.2 has a bug that considers secure==True as soon as + # 'secure' kwarg is passed to set_secure_cookie + secure_cookie = self.secure_cookie + if secure_cookie is None: + secure_cookie = handler.request.protocol == "https" + if secure_cookie: + cookie_options.setdefault("secure", True) + cookie_options.setdefault("path", handler.base_url) # type:ignore[attr-defined] + cookie_name = self.get_cookie_name(handler) + handler.set_secure_cookie(cookie_name, self.user_to_cookie(user), **cookie_options) + + def _force_clear_cookie( + self, handler: web.RequestHandler, name: str, path: str = "/", domain: str | None = None + ) -> None: + """Deletes the cookie with the given name. + + Tornado's cookie handling currently (Jan 2018) stores cookies in a dict + keyed by name, so it can only modify one cookie with a given name per + response. The browser can store multiple cookies with the same name + but different domains and/or paths. This method lets us clear multiple + cookies with the same name. + + Due to limitations of the cookie protocol, you must pass the same + path and domain to clear a cookie as were used when that cookie + was set (but there is no way to find out on the server side + which values were used for a given cookie). 
+ """ + name = escape.native_str(name) + expires = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=365) + + morsel: Morsel[t.Any] = Morsel() + morsel.set(name, "", '""') + morsel["expires"] = httputil.format_timestamp(expires) + morsel["path"] = path + if domain: + morsel["domain"] = domain + handler.add_header("Set-Cookie", morsel.OutputString()) + + def clear_login_cookie(self, handler: web.RequestHandler) -> None: + """Clear the login cookie, effectively logging out the session.""" + cookie_options = {} + cookie_options.update(self.cookie_options) + path = cookie_options.setdefault("path", handler.base_url) # type:ignore[attr-defined] + cookie_name = self.get_cookie_name(handler) + handler.clear_cookie(cookie_name, path=path) + if path and path != "/": + # also clear cookie on / to ensure old cookies are cleared + # after the change in path behavior. + # N.B. This bypasses the normal cookie handling, which can't update + # two cookies with the same name. See the method above. + self._force_clear_cookie(handler, cookie_name) + + def get_user_cookie( + self, handler: web.RequestHandler + ) -> User | None | t.Awaitable[User | None]: + """Get user from a cookie + + Calls user_from_cookie to deserialize cookie value + """ + _user_cookie = handler.get_secure_cookie( + self.get_cookie_name(handler), + **self.get_secure_cookie_kwargs, + ) + if not _user_cookie: + return None + user_cookie = _user_cookie.decode() + # TODO: try/catch in case of change in config? 
+ try: + return self.user_from_cookie(user_cookie) + except Exception as e: + # log bad cookie itself, only at debug-level + self.log.debug(f"Error unpacking user from cookie: cookie={user_cookie}", exc_info=True) + self.log.error(f"Error unpacking user from cookie: {e}") + return None + + auth_header_pat = re.compile(r"(token|bearer)\s+(.+)", re.IGNORECASE) + + def get_token(self, handler: web.RequestHandler) -> str | None: + """Get the user token from a request + + Default: + + - in URL parameters: ?token= + - in header: Authorization: token + """ + user_token = handler.get_argument("token", "") + if not user_token: + # get it from Authorization header + m = self.auth_header_pat.match(handler.request.headers.get("Authorization", "")) + if m: + user_token = m.group(2) + return user_token + + async def get_user_token(self, handler: web.RequestHandler) -> User | None: + """Identify the user based on a token in the URL or Authorization header + + Returns: + - uuid if authenticated + - None if not + """ + token = t.cast("str | None", handler.token) # type:ignore[attr-defined] + if not token: + return None + # check login token from URL argument or Authorization header + user_token = self.get_token(handler) + authenticated = False + if user_token == token: + # token-authenticated, set the login cookie + self.log.debug( + "Accepting token-authenticated request from %s", + handler.request.remote_ip, + ) + authenticated = True + + if authenticated: + # token does not correspond to user-id, + # which is stored in a cookie. + # still check the cookie for the user id + _user = self.get_user_cookie(handler) + if isinstance(_user, t.Awaitable): + _user = await _user + user: User | None = _user + if user is None: + user = self.generate_anonymous_user(handler) + return user + else: + return None + + def generate_anonymous_user(self, handler: web.RequestHandler) -> User: + """Generate a random anonymous user. 
+ + For use when a single shared token is used, + but does not identify a user. + """ + user_id = uuid.uuid4().hex + moon = get_anonymous_username() + name = display_name = f"Anonymous {moon}" + initials = f"A{moon[0]}" + color = None + handler.log.debug(f"Generating new user for token-authenticated request: {user_id}") # type:ignore[attr-defined] + return User(user_id, name, display_name, initials, None, color) + + def should_check_origin(self, handler: web.RequestHandler) -> bool: + """Should the Handler check for CORS origin validation? + + Origin check should be skipped for token-authenticated requests. + + Returns: + - True, if Handler must check for valid CORS origin. + - False, if Handler should skip origin check since requests are token-authenticated. + """ + return not self.is_token_authenticated(handler) + + def is_token_authenticated(self, handler: web.RequestHandler) -> bool: + """Returns True if handler has been token authenticated. Otherwise, False. + + Login with a token is used to signal certain things, such as: + + - permit access to REST API + - xsrf protection + - skip origin-checks for scripts + """ + # ensure get_user has been called, so we know if we're token-authenticated + handler.current_user # noqa: B018 + return getattr(handler, "_token_authenticated", False) + + def validate_security( + self, + app: t.Any, + ssl_options: dict[str, t.Any] | None = None, + ) -> None: + """Check the application's security. + + Show messages, or abort if necessary, based on the security configuration. + """ + if not app.ip: + warning = "WARNING: The Jupyter server is listening on all IP addresses" + if ssl_options is None: + app.log.warning(f"{warning} and not using encryption. This is not recommended.") + if not self.auth_enabled: + app.log.warning( + f"{warning} and not using authentication. " + "This is highly insecure and not recommended." + ) + elif not self.auth_enabled: + app.log.warning( + "All authentication is disabled." 
+ " Anyone who can connect to this server will be able to run code." + ) + + def process_login_form(self, handler: web.RequestHandler) -> User | None: + """Process login form data + + Return authenticated User if successful, None if not. + """ + typed_password = handler.get_argument("password", default="") + user = None + if not self.auth_enabled: + self.log.warning("Accepting anonymous login because auth fully disabled!") + return self.generate_anonymous_user(handler) + + if self.token and self.token == typed_password: + return t.cast(User, self.user_for_token(typed_password)) # type:ignore[attr-defined] + + return user + + @property + def auth_enabled(self): + """Is authentication enabled? + + Should always be True, but may be False in rare, insecure cases + where requests with no auth are allowed. + + Previously: LoginHandler.get_login_available + """ + return True + + @property + def login_available(self): + """Whether a LoginHandler is needed - and therefore whether the login page should be displayed.""" + return self.auth_enabled + + @property + def logout_available(self): + """Whether a LogoutHandler is needed.""" + return True + + +class PasswordIdentityProvider(IdentityProvider): + """A password identity provider.""" + + hashed_password = Unicode( + "", + config=True, + help=_i18n( + """ + Hashed password to use for web authentication. + + To generate, type in a python/IPython shell: + + from jupyter_server.auth import passwd; passwd() + + The string should be of the form type:salt:hashed-password. + """ + ), + ) + + password_required = Bool( + False, + config=True, + help=_i18n( + """ + Forces users to use a password for the Jupyter server. + This is useful in a multi user environment, for instance when + everybody in the LAN can access each other's machine through ssh. + + In such a case, serving on localhost is not secure since + any user can connect to the Jupyter server via ssh. 
+ + """ + ), + ) + + allow_password_change = Bool( + True, + config=True, + help=_i18n( + """ + Allow password to be changed at login for the Jupyter server. + + While logging in with a token, the Jupyter server UI will give the opportunity to + the user to enter a new password at the same time that will replace + the token login mechanism. + + This can be set to False to prevent changing password from the UI/API. + """ + ), + ) + + @default("need_token") + def _need_token_default(self): + return not bool(self.hashed_password) + + @property + def login_available(self) -> bool: + """Whether a LoginHandler is needed - and therefore whether the login page should be displayed.""" + return self.auth_enabled + + @property + def auth_enabled(self) -> bool: + """Return whether any auth is enabled""" + return bool(self.hashed_password or self.token) + + def passwd_check(self, password): + """Check password against our stored hashed password""" + return passwd_check(self.hashed_password, password) + + def process_login_form(self, handler: web.RequestHandler) -> User | None: + """Process login form data + + Return authenticated User if successful, None if not. 
+ """ + typed_password = handler.get_argument("password", default="") + new_password = handler.get_argument("new_password", default="") + user = None + if not self.auth_enabled: + self.log.warning("Accepting anonymous login because auth fully disabled!") + return self.generate_anonymous_user(handler) + + if self.passwd_check(typed_password) and not new_password: + return self.generate_anonymous_user(handler) + elif self.token and self.token == typed_password: + user = self.generate_anonymous_user(handler) + if new_password and self.allow_password_change: + config_dir = handler.settings.get("config_dir", "") + config_file = os.path.join(config_dir, "jupyter_server_config.json") + self.hashed_password = set_password(new_password, config_file=config_file) + self.log.info(_i18n(f"Wrote hashed password to {config_file}")) + + return user + + def validate_security( + self, + app: t.Any, + ssl_options: dict[str, t.Any] | None = None, + ) -> None: + """Handle security validation.""" + super().validate_security(app, ssl_options) + if self.password_required and (not self.hashed_password): + self.log.critical( + _i18n("Jupyter servers are configured to only be run with a password.") + ) + self.log.critical(_i18n("Hint: run the following command to set a password")) + self.log.critical(_i18n("\t$ python -m jupyter_server.auth password")) + sys.exit(1) + + +class LegacyIdentityProvider(PasswordIdentityProvider): + """Legacy IdentityProvider for use with custom LoginHandlers + + Login configuration has moved from LoginHandler to IdentityProvider + in Jupyter Server 2.0. 
+ """ + + # settings must be passed for + settings = Dict() + + @default("settings") + def _default_settings(self): + return { + "token": self.token, + "password": self.hashed_password, + } + + @default("login_handler_class") + def _default_login_handler_class(self): + from .login import LegacyLoginHandler + + return LegacyLoginHandler + + @property + def auth_enabled(self): + return self.login_available + + def get_user(self, handler: web.RequestHandler) -> User | None: + """Get the user.""" + user = self.login_handler_class.get_user(handler) # type:ignore[attr-defined] + if user is None: + return None + return _backward_compat_user(user) + + @property + def login_available(self) -> bool: + return bool( + self.login_handler_class.get_login_available( # type:ignore[attr-defined] + self.settings + ) + ) + + def should_check_origin(self, handler: web.RequestHandler) -> bool: + """Whether we should check origin.""" + return bool(self.login_handler_class.should_check_origin(handler)) # type:ignore[attr-defined] + + def is_token_authenticated(self, handler: web.RequestHandler) -> bool: + """Whether we are token authenticated.""" + return bool(self.login_handler_class.is_token_authenticated(handler)) # type:ignore[attr-defined] + + def validate_security( + self, + app: t.Any, + ssl_options: dict[str, t.Any] | None = None, + ) -> None: + """Validate security.""" + if self.password_required and (not self.hashed_password): + self.log.critical( + _i18n("Jupyter servers are configured to only be run with a password.") + ) + self.log.critical(_i18n("Hint: run the following command to set a password")) + self.log.critical(_i18n("\t$ python -m jupyter_server.auth password")) + sys.exit(1) + self.login_handler_class.validate_security( # type:ignore[attr-defined] + app, ssl_options + ) diff --git a/jupyter_server/auth/login.py b/jupyter_server/auth/login.py index 382077d9e0..22832df341 100644 --- a/jupyter_server/auth/login.py +++ b/jupyter_server/auth/login.py @@ -12,13 +12,14 @@ 
from .security import passwd_check, set_password -class LoginHandler(JupyterHandler): +class LoginFormHandler(JupyterHandler): """The basic tornado login handler - authenticates with a hashed password from the configuration. + accepts login form, passed to IdentityProvider.process_login_form. """ def _render(self, message=None): + """Render the login form.""" self.write( self.render_template( "login.html", @@ -40,12 +41,25 @@ def _redirect_safe(self, url, default=None): # \ is not valid in urls, but some browsers treat it as / # instead of %5C, causing `\\` to behave as `//` url = url.replace("\\", "%5C") + # urllib and browsers interpret extra '/' in the scheme separator (`scheme:///host/path`) + # differently. + # urllib gives scheme=scheme, netloc='', path='/host/path', while + # browsers get scheme=scheme, netloc='host', path='/path' + # so make sure ':///*' collapses to '://' by splitting and stripping any additional leading slash + # don't allow any kind of `:/` shenanigans by splitting on ':' only + # and replacing `:/*` with exactly `://` + if ":" in url: + scheme, _, rest = url.partition(":") + url = f"{scheme}://{rest.lstrip('/')}" parsed = urlparse(url) - if parsed.netloc or not (parsed.path + "/").startswith(self.base_url): + # full url may be `//host/path` (empty scheme == same scheme as request) + # or `https://host/path` + # or even `https:///host/path` (invalid, but accepted and ambiguously interpreted) + if (parsed.scheme or parsed.netloc) or not (parsed.path + "/").startswith(self.base_url): # require that next_url be absolute path within our path allow = False # OR pass our cross-origin check - if parsed.netloc: + if parsed.scheme or parsed.netloc: # if full URL, run our cross-origin check: origin = f"{parsed.scheme}://{parsed.netloc}" origin = origin.lower() @@ -60,20 +74,44 @@ def _redirect_safe(self, url, default=None): self.redirect(url) def get(self): + """Get the login form.""" if self.current_user: next_url = self.get_argument("next", 
default=self.base_url) self._redirect_safe(next_url) else: self._render() + def post(self): + """Post a login.""" + user = self.current_user = self.identity_provider.process_login_form(self) + if user is None: + self.set_status(401) + self._render(message={"error": "Invalid credentials"}) + return + + self.log.info(f"User {user.username} logged in.") + self.identity_provider.set_login_cookie(self, user) + next_url = self.get_argument("next", default=self.base_url) + self._redirect_safe(next_url) + + +class LegacyLoginHandler(LoginFormHandler): + """Legacy LoginHandler, implementing most custom auth configuration. + + Deprecated in jupyter-server 2.0. + Login configuration has moved to IdentityProvider. + """ + @property def hashed_password(self): return self.password_from_settings(self.settings) def passwd_check(self, a, b): + """Check a passwd.""" return passwd_check(a, b) def post(self): + """Post a login form.""" typed_password = self.get_argument("password", default="") new_password = self.get_argument("new_password", default="") @@ -82,10 +120,13 @@ def post(self): self.set_login_cookie(self, uuid.uuid4().hex) elif self.token and self.token == typed_password: self.set_login_cookie(self, uuid.uuid4().hex) - if new_password and self.settings.get("allow_password_change"): - config_dir = self.settings.get("config_dir") + if new_password and getattr(self.identity_provider, "allow_password_change", False): + config_dir = self.settings.get("config_dir", "") config_file = os.path.join(config_dir, "jupyter_server_config.json") - set_password(new_password, config_file=config_file) + if hasattr(self.identity_provider, "hashed_password"): + self.identity_provider.hashed_password = self.settings[ + "password" + ] = set_password(new_password, config_file=config_file) self.log.info("Wrote hashed password to %s" % config_file) else: self.set_status(401) @@ -130,52 +171,38 @@ def get_token(cls, handler): @classmethod def should_check_origin(cls, handler): - """Should the 
Handler check for CORS origin validation? - - Origin check should be skipped for token-authenticated requests. - - Returns: - - True, if Handler must check for valid CORS origin. - - False, if Handler should skip origin check since requests are token-authenticated. - """ + """DEPRECATED in 2.0, use IdentityProvider API""" return not cls.is_token_authenticated(handler) @classmethod def is_token_authenticated(cls, handler): - """Returns True if handler has been token authenticated. Otherwise, False. - - Login with a token is used to signal certain things, such as: - - - permit access to REST API - - xsrf protection - - skip origin-checks for scripts - """ + """DEPRECATED in 2.0, use IdentityProvider API""" if getattr(handler, "_user_id", None) is None: # ensure get_user has been called, so we know if we're token-authenticated - handler.get_current_user() + handler.current_user # noqa: B018 return getattr(handler, "_token_authenticated", False) @classmethod def get_user(cls, handler): - """Called by handlers.get_current_user for identifying the current user. - - See tornado.web.RequestHandler.get_current_user for details. - """ + """DEPRECATED in 2.0, use IdentityProvider API""" # Can't call this get_current_user because it will collide when # called on LoginHandler itself. 
if getattr(handler, "_user_id", None): return handler._user_id - user_id = cls.get_user_token(handler) - if user_id is None: - get_secure_cookie_kwargs = handler.settings.get("get_secure_cookie_kwargs", {}) - user_id = handler.get_secure_cookie(handler.cookie_name, **get_secure_cookie_kwargs) - if user_id: - user_id = user_id.decode() - else: - cls.set_login_cookie(handler, user_id) + token_user_id = cls.get_user_token(handler) + cookie_user_id = cls.get_user_cookie(handler) + # prefer token to cookie if both given, + # because token is always explicit + user_id = token_user_id or cookie_user_id + if token_user_id: + # if token-authenticated, persist user_id in cookie + # if it hasn't already been stored there + if user_id != cookie_user_id: + cls.set_login_cookie(handler, user_id) # Record that the current request has been authenticated with a token. # Used in is_token_authenticated above. handler._token_authenticated = True + if user_id is None: # If an invalid cookie was sent, clear it to prevent unnecessary # extra warnings. 
But don't do this on a request with *no* cookie, @@ -193,16 +220,20 @@ def get_user(cls, handler): return user_id @classmethod - def get_user_token(cls, handler): - """Identify the user based on a token in the URL or Authorization header + def get_user_cookie(cls, handler): + """DEPRECATED in 2.0, use IdentityProvider API""" + get_secure_cookie_kwargs = handler.settings.get("get_secure_cookie_kwargs", {}) + user_id = handler.get_secure_cookie(handler.cookie_name, **get_secure_cookie_kwargs) + if user_id: + user_id = user_id.decode() + return user_id - Returns: - - uuid if authenticated - - None if not - """ + @classmethod + def get_user_token(cls, handler): + """DEPRECATED in 2.0, use IdentityProvider API""" token = handler.token if not token: - return + return None # check login token from URL argument or Authorization header user_token = cls.get_token(handler) authenticated = False @@ -215,16 +246,23 @@ def get_user_token(cls, handler): authenticated = True if authenticated: - return uuid.uuid4().hex + # token does not correspond to user-id, + # which is stored in a cookie. + # still check the cookie for the user id + user_id = cls.get_user_cookie(handler) + if user_id is None: + # no cookie, generate new random user_id + user_id = uuid.uuid4().hex + handler.log.info( + f"Generating new user_id for token-authenticated request: {user_id}" + ) + return user_id else: return None @classmethod def validate_security(cls, app, ssl_options=None): - """Check the application's security. - - Show messages, or abort if necessary, based on the security configuration. - """ + """DEPRECATED in 2.0, use IdentityProvider API""" if not app.ip: warning = "WARNING: The Jupyter server is listening on all IP addresses" if ssl_options is None: @@ -234,22 +272,23 @@ def validate_security(cls, app, ssl_options=None): f"{warning} and not using authentication. " "This is highly insecure and not recommended." 
) - else: - if not app.password and not app.token: - app.log.warning( - "All authentication is disabled." - " Anyone who can connect to this server will be able to run code." - ) + elif not app.password and not app.token: + app.log.warning( + "All authentication is disabled." + " Anyone who can connect to this server will be able to run code." + ) @classmethod def password_from_settings(cls, settings): - """Return the hashed password from the tornado settings. - - If there is no configured password, an empty string will be returned. - """ + """DEPRECATED in 2.0, use IdentityProvider API""" return settings.get("password", "") @classmethod def get_login_available(cls, settings): - """Whether this LoginHandler is needed - and therefore whether the login page should be displayed.""" + """DEPRECATED in 2.0, use IdentityProvider API""" + return bool(cls.password_from_settings(settings) or settings.get("token")) + + +# deprecated import, so deprecated implementations get the Legacy class instead +LoginHandler = LegacyLoginHandler diff --git a/jupyter_server/auth/logout.py b/jupyter_server/auth/logout.py index abe23425c9..3db7f796ba 100644 --- a/jupyter_server/auth/logout.py +++ b/jupyter_server/auth/logout.py @@ -6,8 +6,11 @@ class LogoutHandler(JupyterHandler): + """An auth logout handler.""" + def get(self): - self.clear_login_cookie() + """Handle a logout.""" + self.identity_provider.clear_login_cookie(self) if self.login_available: message = {"info": "Successfully logged out."} else: diff --git a/jupyter_server/auth/security.py b/jupyter_server/auth/security.py index fa7dded7fb..a5ae185f1e 100644 --- a/jupyter_server/auth/security.py +++ b/jupyter_server/auth/security.py @@ -11,7 +11,8 @@ from contextlib import contextmanager from jupyter_core.paths import jupyter_config_dir -from traitlets.config import Config, ConfigFileNotFound, JSONFileConfigLoader +from traitlets.config import Config +from traitlets.config.loader import ConfigFileNotFound, JSONFileConfigLoader # 
Length of the salt in nr of hex chars, which implies salt_len * 4 # bits of randomness. @@ -51,10 +52,10 @@ def passwd(passphrase=None, algorithm="argon2"): if p0 == p1: passphrase = p0 break - else: - print("Passwords do not match.") + warnings.warn("Passwords do not match.", stacklevel=2) else: - raise ValueError("No matching passwords found. Giving up.") + msg = "No matching passwords found. Giving up." + raise ValueError(msg) if algorithm == "argon2": import argon2 @@ -64,9 +65,9 @@ def passwd(passphrase=None, algorithm="argon2"): time_cost=10, parallelism=8, ) - h = ph.hash(passphrase) + h_ph = ph.hash(passphrase) - return ":".join((algorithm, h)) + return ":".join((algorithm, h_ph)) h = hashlib.new(algorithm) salt = ("%0" + str(salt_len) + "x") % random.getrandbits(4 * salt_len) @@ -160,7 +161,9 @@ def persist_config(config_file=None, mode=0o600): os.chmod(config_file, mode) except Exception: tb = traceback.format_exc() - warnings.warn(f"Failed to set permissions on {config_file}:\n{tb}", RuntimeWarning) + warnings.warn( + f"Failed to set permissions on {config_file}:\n{tb}", RuntimeWarning, stacklevel=2 + ) def set_password(password=None, config_file=None): @@ -169,4 +172,5 @@ def set_password(password=None, config_file=None): hashed_password = passwd(password) with persist_config(config_file) as config: - config.ServerApp.password = hashed_password + config.IdentityProvider.hashed_password = hashed_password + return hashed_password diff --git a/jupyter_server/auth/utils.py b/jupyter_server/auth/utils.py index b939b87ae0..b0f790be1f 100644 --- a/jupyter_server/auth/utils.py +++ b/jupyter_server/auth/utils.py @@ -3,21 +3,17 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import importlib +import random import re import warnings def warn_disabled_authorization(): + """DEPRECATED, does nothing""" warnings.warn( - "The Tornado web application does not have an 'authorizer' defined " - "in its settings. 
In future releases of jupyter_server, this will " - "be a required key for all subclasses of `JupyterHandler`. For an " - "example, see the jupyter_server source code for how to " - "add an authorizer to the tornado settings: " - "https://github.com/jupyter-server/jupyter_server/blob/" - "653740cbad7ce0c8a8752ce83e4d3c2c754b13cb/jupyter_server/serverapp.py" - "#L234-L256", - FutureWarning, + "jupyter_server.auth.utils.warn_disabled_authorization is deprecated", + DeprecationWarning, + stacklevel=2, ) @@ -44,9 +40,9 @@ def get_regex_to_resource_map(): from jupyter_server.serverapp import JUPYTER_SERVICE_HANDLERS modules = [] - for mod in JUPYTER_SERVICE_HANDLERS.values(): - if mod: - modules.extend(mod) + for mod_name in JUPYTER_SERVICE_HANDLERS.values(): + if mod_name: + modules.extend(mod_name) resource_map = {} for handler_module in modules: mod = importlib.import_module(handler_module) @@ -79,3 +75,95 @@ def match_url_to_resource(url, regex_mapping=None): pattern = re.compile(regex) if pattern.fullmatch(url): return auth_resource + + +# From https://en.wikipedia.org/wiki/Moons_of_Jupiter +moons_of_jupyter = [ + "Metis", + "Adrastea", + "Amalthea", + "Thebe", + "Io", + "Europa", + "Ganymede", + "Callisto", + "Themisto", + "Leda", + "Ersa", + "Pandia", + "Himalia", + "Lysithea", + "Elara", + "Dia", + "Carpo", + "Valetudo", + "Euporie", + "Eupheme", + # 'S/2003 J 18', + # 'S/2010 J 2', + "Helike", + # 'S/2003 J 16', + # 'S/2003 J 2', + "Euanthe", + # 'S/2017 J 7', + "Hermippe", + "Praxidike", + "Thyone", + "Thelxinoe", + # 'S/2017 J 3', + "Ananke", + "Mneme", + # 'S/2016 J 1', + "Orthosie", + "Harpalyke", + "Iocaste", + # 'S/2017 J 9', + # 'S/2003 J 12', + # 'S/2003 J 4', + "Erinome", + "Aitne", + "Herse", + "Taygete", + # 'S/2017 J 2', + # 'S/2017 J 6', + "Eukelade", + "Carme", + # 'S/2003 J 19', + "Isonoe", + # 'S/2003 J 10', + "Autonoe", + "Philophrosyne", + "Cyllene", + "Pasithee", + # 'S/2010 J 1', + "Pasiphae", + "Sponde", + # 'S/2017 J 8', + 
"Eurydome", + # 'S/2017 J 5', + "Kalyke", + "Hegemone", + "Kale", + "Kallichore", + # 'S/2011 J 1', + # 'S/2017 J 1', + "Chaldene", + "Arche", + "Eirene", + "Kore", + # 'S/2011 J 2', + # 'S/2003 J 9', + "Megaclite", + "Aoede", + # 'S/2003 J 23', + "Callirrhoe", + "Sinope", +] + + +def get_anonymous_username() -> str: + """ + Get a random user-name based on the moons of Jupyter. + This function returns names like "Anonymous Io" or "Anonymous Metis". + """ + return moons_of_jupyter[random.randint(0, len(moons_of_jupyter) - 1)] diff --git a/jupyter_server/base/call_context.py b/jupyter_server/base/call_context.py new file mode 100644 index 0000000000..3d989121c2 --- /dev/null +++ b/jupyter_server/base/call_context.py @@ -0,0 +1,88 @@ +"""Provides access to variables pertaining to specific call contexts.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. + +from contextvars import Context, ContextVar, copy_context +from typing import Any, Dict, List + + +class CallContext: + """CallContext essentially acts as a namespace for managing context variables. + + Although not required, it is recommended that any "file-spanning" context variable + names (i.e., variables that will be set or retrieved from multiple files or services) be + added as constants to this class definition. + """ + + # Add well-known (file-spanning) names here. + #: Provides access to the current request handler once set. + JUPYTER_HANDLER: str = "JUPYTER_HANDLER" + + # A map of variable name to value is maintained as the single ContextVar. This also enables + # easier management over maintaining a set of ContextVar instances, since the Context is a + # map of ContextVar instances to their values, and the "name" is no longer a lookup key. 
+ _NAME_VALUE_MAP = "_name_value_map" + _name_value_map: ContextVar[Dict[str, Any]] = ContextVar(_NAME_VALUE_MAP) + + @classmethod + def get(cls, name: str) -> Any: + """Returns the value corresponding the named variable relative to this context. + + If the named variable doesn't exist, None will be returned. + + Parameters + ---------- + name : str + The name of the variable to get from the call context + + Returns + ------- + value: Any + The value associated with the named variable for this call context + """ + name_value_map = CallContext._get_map() + + if name in name_value_map: + return name_value_map[name] + return None # TODO - should this raise `LookupError` (or a custom error derived from said) + + @classmethod + def set(cls, name: str, value: Any) -> None: + """Sets the named variable to the specified value in the current call context. + + Parameters + ---------- + name : str + The name of the variable to store into the call context + value : Any + The value of the variable to store into the call context + + Returns + ------- + None + """ + name_value_map = CallContext._get_map() + name_value_map[name] = value + + @classmethod + def context_variable_names(cls) -> List[str]: + """Returns a list of variable names set for this call context. + + Returns + ------- + names: List[str] + A list of variable names set for this call context. + """ + name_value_map = CallContext._get_map() + return list(name_value_map.keys()) + + @classmethod + def _get_map(cls) -> Dict[str, Any]: + """Get the map of names to their values from the _NAME_VALUE_MAP context var. + + If the map does not exist in the current context, an empty map is created and returned. 
+ """ + ctx: Context = copy_context() + if CallContext._name_value_map not in ctx: + CallContext._name_value_map.set({}) + return CallContext._name_value_map.get() diff --git a/jupyter_server/base/handlers.py b/jupyter_server/base/handlers.py index 42f7fb3d5e..b1b783cca9 100644 --- a/jupyter_server/base/handlers.py +++ b/jupyter_server/base/handlers.py @@ -1,30 +1,35 @@ """Base Tornado handlers for the Jupyter server.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -import datetime +from __future__ import annotations + import functools +import inspect import ipaddress import json import mimetypes import os import re -import traceback import types import warnings from http.client import responses -from http.cookies import Morsel +from logging import Logger +from typing import TYPE_CHECKING, Any, Awaitable, Sequence, cast from urllib.parse import urlparse import prometheus_client from jinja2 import TemplateNotFound from jupyter_core.paths import is_hidden -from tornado import escape, httputil, web +from jupyter_events import EventLogger +from tornado import web from tornado.log import app_log from traitlets.config import Application import jupyter_server +from jupyter_server import CallContext from jupyter_server._sysinfo import get_sys_info from jupyter_server._tz import utcnow +from jupyter_server.auth.decorator import authorized from jupyter_server.i18n import combine_translations from jupyter_server.services.security import csp_report_uri from jupyter_server.utils import ( @@ -36,24 +41,38 @@ urldecode_unix_socket_path, ) +if TYPE_CHECKING: + from jupyter_client.kernelspec import KernelSpecManager + from jupyter_server_terminals.terminalmanager import TerminalManager + from tornado.concurrent import Future + + from jupyter_server.auth.authorizer import Authorizer + from jupyter_server.auth.identity import IdentityProvider, User + from jupyter_server.serverapp import ServerApp + from 
jupyter_server.services.config.manager import ConfigManager + from jupyter_server.services.contents.manager import ContentsManager + from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager + from jupyter_server.services.sessions.sessionmanager import SessionManager + # ----------------------------------------------------------------------------- # Top-level handlers # ----------------------------------------------------------------------------- -non_alphanum = re.compile(r"[^A-Za-z0-9]") _sys_info_cache = None def json_sys_info(): - global _sys_info_cache + """Get sys info as json.""" + global _sys_info_cache # noqa: PLW0603 if _sys_info_cache is None: _sys_info_cache = json.dumps(get_sys_info()) return _sys_info_cache -def log(): +def log() -> Logger: + """Get the application log.""" if Application.initialized(): - return Application.instance().log + return cast(Logger, Application.instance().log) else: return app_log @@ -62,14 +81,18 @@ class AuthenticatedHandler(web.RequestHandler): """A RequestHandler with an authenticated user.""" @property - def content_security_policy(self): + def base_url(self) -> str: + return cast(str, self.settings.get("base_url", "/")) + + @property + def content_security_policy(self) -> str: """The default Content-Security-Policy header Can be overridden by defining Content-Security-Policy in settings['headers'] """ if "Content-Security-Policy" in self.settings.get("headers", {}): # user-specified, don't override - return self.settings["headers"]["Content-Security-Policy"] + return cast(str, self.settings["headers"]["Content-Security-Policy"]) return "; ".join( [ @@ -80,7 +103,8 @@ def content_security_policy(self): ] ) - def set_default_headers(self): + def set_default_headers(self) -> None: + """Set the default headers.""" headers = {} headers["X-Content-Type-Options"] = "nosniff" headers.update(self.settings.get("headers", {})) @@ -95,50 +119,62 @@ def set_default_headers(self): # tornado raise Exception 
(not a subclass) # if method is unsupported (websocket and Access-Control-Allow-Origin # for example, so just ignore) - self.log.debug(e) - - def force_clear_cookie(self, name, path="/", domain=None): - """Deletes the cookie with the given name. + self.log.exception( # type:ignore[attr-defined] + "Could not set default headers: %s", e + ) - Tornado's cookie handling currently (Jan 2018) stores cookies in a dict - keyed by name, so it can only modify one cookie with a given name per - response. The browser can store multiple cookies with the same name - but different domains and/or paths. This method lets us clear multiple - cookies with the same name. + @property + def cookie_name(self) -> str: + warnings.warn( + """JupyterHandler.login_handler is deprecated in 2.0, + use JupyterHandler.identity_provider. + """, + DeprecationWarning, + stacklevel=2, + ) + return self.identity_provider.get_cookie_name(self) + + def force_clear_cookie(self, name: str, path: str = "/", domain: str | None = None) -> None: + """Force a cookie clear.""" + warnings.warn( + """JupyterHandler.login_handler is deprecated in 2.0, + use JupyterHandler.identity_provider. + """, + DeprecationWarning, + stacklevel=2, + ) + self.identity_provider._force_clear_cookie(self, name, path=path, domain=domain) + + def clear_login_cookie(self) -> None: + """Clear a login cookie.""" + warnings.warn( + """JupyterHandler.login_handler is deprecated in 2.0, + use JupyterHandler.identity_provider. + """, + DeprecationWarning, + stacklevel=2, + ) + self.identity_provider.clear_login_cookie(self) + + def get_current_user(self) -> str: + """Get the current user.""" + clsname = self.__class__.__name__ + msg = ( + f"Calling `{clsname}.get_current_user()` directly is deprecated in jupyter-server 2.0." + " Use `self.current_user` instead (works in all versions)." 
+ ) + if hasattr(self, "_jupyter_current_user"): + # backward-compat: return _jupyter_current_user + warnings.warn( + msg, + DeprecationWarning, + stacklevel=2, + ) + return cast(str, self._jupyter_current_user) + # haven't called get_user in prepare, raise + raise RuntimeError(msg) - Due to limitations of the cookie protocol, you must pass the same - path and domain to clear a cookie as were used when that cookie - was set (but there is no way to find out on the server side - which values were used for a given cookie). - """ - name = escape.native_str(name) - expires = datetime.datetime.utcnow() - datetime.timedelta(days=365) - - morsel = Morsel() - morsel.set(name, "", '""') - morsel["expires"] = httputil.format_timestamp(expires) - morsel["path"] = path - if domain: - morsel["domain"] = domain - self.add_header("Set-Cookie", morsel.OutputString()) - - def clear_login_cookie(self): - cookie_options = self.settings.get("cookie_options", {}) - path = cookie_options.setdefault("path", self.base_url) - self.clear_cookie(self.cookie_name, path=path) - if path and path != "/": - # also clear cookie on / to ensure old cookies are cleared - # after the change in path behavior. - # N.B. This bypasses the normal cookie handling, which can't update - # two cookies with the same name. See the method above. - self.force_clear_cookie(self.cookie_name) - - def get_current_user(self): - if self.login_handler is None: - return "anonymous" - return self.login_handler.get_user(self) - - def skip_check_origin(self): + def skip_check_origin(self) -> bool: """Ask my login_handler if I should skip the origin_check For example: in the default LoginHandler, if a request is token-authenticated, @@ -147,53 +183,89 @@ def skip_check_origin(self): if self.request.method == "OPTIONS": # no origin-check on options requests, which are used to check origins! 
return True - if self.login_handler is None or not hasattr(self.login_handler, "should_check_origin"): - return False - return not self.login_handler.should_check_origin(self) + return not self.identity_provider.should_check_origin(self) @property - def token_authenticated(self): + def token_authenticated(self) -> bool: """Have I been authenticated with a token?""" - if self.login_handler is None or not hasattr(self.login_handler, "is_token_authenticated"): - return False - return self.login_handler.is_token_authenticated(self) + return self.identity_provider.is_token_authenticated(self) @property - def cookie_name(self): - default_cookie_name = non_alphanum.sub("-", f"username-{self.request.host}") - return self.settings.get("cookie_name", default_cookie_name) - - @property - def logged_in(self): + def logged_in(self) -> bool: """Is a user currently logged in?""" - user = self.get_current_user() - return user and not user == "anonymous" + user = self.current_user + return bool(user and user != "anonymous") @property - def login_handler(self): + def login_handler(self) -> Any: """Return the login handler for this application, if any.""" - return self.settings.get("login_handler_class", None) + warnings.warn( + """JupyterHandler.login_handler is deprecated in 2.0, + use JupyterHandler.identity_provider. + """, + DeprecationWarning, + stacklevel=2, + ) + return self.identity_provider.login_handler_class @property - def token(self): + def token(self) -> str | None: """Return the login token for this application, if any.""" - return self.settings.get("token", None) + return self.identity_provider.token @property - def login_available(self): + def login_available(self) -> bool: """May a user proceed to log in? This returns True if login capability is available, irrespective of whether the user is already logged in or not. 
""" - if self.login_handler is None: - return False - return bool(self.login_handler.get_login_available(self.settings)) + return cast(bool, self.identity_provider.login_available) @property - def authorizer(self): - return self.settings.get("authorizer") + def authorizer(self) -> Authorizer: + if "authorizer" not in self.settings: + warnings.warn( + "The Tornado web application does not have an 'authorizer' defined " + "in its settings. In future releases of jupyter_server, this will " + "be a required key for all subclasses of `JupyterHandler`. For an " + "example, see the jupyter_server source code for how to " + "add an authorizer to the tornado settings: " + "https://github.com/jupyter-server/jupyter_server/blob/" + "653740cbad7ce0c8a8752ce83e4d3c2c754b13cb/jupyter_server/serverapp.py" + "#L234-L256", + stacklevel=2, + ) + from jupyter_server.auth import AllowAllAuthorizer + + self.settings["authorizer"] = AllowAllAuthorizer( + config=self.settings.get("config", None), + identity_provider=self.identity_provider, + ) + + return cast("Authorizer", self.settings.get("authorizer")) + + @property + def identity_provider(self) -> IdentityProvider: + if "identity_provider" not in self.settings: + warnings.warn( + "The Tornado web application does not have an 'identity_provider' defined " + "in its settings. In future releases of jupyter_server, this will " + "be a required key for all subclasses of `JupyterHandler`. 
For an " + "example, see the jupyter_server source code for how to " + "add an identity provider to the tornado settings: " + "https://github.com/jupyter-server/jupyter_server/blob/v2.0.0/" + "jupyter_server/serverapp.py#L242", + stacklevel=2, + ) + from jupyter_server.auth import IdentityProvider + + # no identity provider set, load default + self.settings["identity_provider"] = IdentityProvider( + config=self.settings.get("config", None) + ) + return cast("IdentityProvider", self.settings["identity_provider"]) class JupyterHandler(AuthenticatedHandler): @@ -203,113 +275,120 @@ class JupyterHandler(AuthenticatedHandler): """ @property - def config(self): - return self.settings.get("config", None) + def config(self) -> dict[str, Any] | None: + return cast("dict[str, Any] | None", self.settings.get("config", None)) @property - def log(self): + def log(self) -> Logger: """use the Jupyter log by default, falling back on tornado's logger""" return log() @property - def jinja_template_vars(self): + def jinja_template_vars(self) -> dict[str, Any]: """User-supplied values to supply to jinja templates.""" - return self.settings.get("jinja_template_vars", {}) + return cast("dict[str, Any]", self.settings.get("jinja_template_vars", {})) @property - def serverapp(self): - return self.settings["serverapp"] + def serverapp(self) -> ServerApp | None: + return cast("ServerApp | None", self.settings["serverapp"]) # --------------------------------------------------------------- # URLs # --------------------------------------------------------------- @property - def version_hash(self): + def version_hash(self) -> str: """The version hash to use for cache hints for static files""" - return self.settings.get("version_hash", "") + return cast(str, self.settings.get("version_hash", "")) @property - def mathjax_url(self): - url = self.settings.get("mathjax_url", "") + def mathjax_url(self) -> str: + url = cast(str, self.settings.get("mathjax_url", "")) if not url or 
url_is_absolute(url): return url return url_path_join(self.base_url, url) @property - def mathjax_config(self): - return self.settings.get("mathjax_config", "TeX-AMS-MML_HTMLorMML-full,Safe") - - @property - def base_url(self): - return self.settings.get("base_url", "/") + def mathjax_config(self) -> str: + return cast(str, self.settings.get("mathjax_config", "TeX-AMS-MML_HTMLorMML-full,Safe")) @property - def default_url(self): - return self.settings.get("default_url", "") + def default_url(self) -> str: + return cast(str, self.settings.get("default_url", "")) @property - def ws_url(self): - return self.settings.get("websocket_url", "") + def ws_url(self) -> str: + return cast(str, self.settings.get("websocket_url", "")) @property - def contents_js_source(self): + def contents_js_source(self) -> str: self.log.debug( "Using contents: %s", self.settings.get("contents_js_source", "services/contents"), ) - return self.settings.get("contents_js_source", "services/contents") + return cast(str, self.settings.get("contents_js_source", "services/contents")) # --------------------------------------------------------------- # Manager objects # --------------------------------------------------------------- @property - def kernel_manager(self): - return self.settings["kernel_manager"] + def kernel_manager(self) -> AsyncMappingKernelManager: + return cast("AsyncMappingKernelManager", self.settings["kernel_manager"]) @property - def contents_manager(self): - return self.settings["contents_manager"] + def contents_manager(self) -> ContentsManager: + return cast("ContentsManager", self.settings["contents_manager"]) @property - def session_manager(self): - return self.settings["session_manager"] + def session_manager(self) -> SessionManager: + return cast("SessionManager", self.settings["session_manager"]) @property - def terminal_manager(self): - return self.settings["terminal_manager"] + def terminal_manager(self) -> TerminalManager: + return cast("TerminalManager", 
self.settings["terminal_manager"]) @property - def kernel_spec_manager(self): - return self.settings["kernel_spec_manager"] + def kernel_spec_manager(self) -> KernelSpecManager: + return cast("KernelSpecManager", self.settings["kernel_spec_manager"]) @property - def config_manager(self): - return self.settings["config_manager"] + def config_manager(self) -> ConfigManager: + return cast("ConfigManager", self.settings["config_manager"]) + + @property + def event_logger(self) -> EventLogger: + return cast("EventLogger", self.settings["event_logger"]) # --------------------------------------------------------------- # CORS # --------------------------------------------------------------- @property - def allow_origin(self): + def allow_origin(self) -> str: """Normal Access-Control-Allow-Origin""" - return self.settings.get("allow_origin", "") + return cast(str, self.settings.get("allow_origin", "")) @property - def allow_origin_pat(self): + def allow_origin_pat(self) -> str | None: """Regular expression version of allow_origin""" - return self.settings.get("allow_origin_pat", None) + return cast("str | None", self.settings.get("allow_origin_pat", None)) @property - def allow_credentials(self): + def allow_credentials(self) -> bool: """Whether to set Access-Control-Allow-Credentials""" - return self.settings.get("allow_credentials", False) + return cast(bool, self.settings.get("allow_credentials", False)) - def set_default_headers(self): + def set_default_headers(self) -> None: """Add CORS headers, if defined""" super().set_default_headers() + + def set_cors_headers(self) -> None: + """Add CORS headers, if defined + + Now that current_user is async (jupyter-server 2.0), + must be called at the end of prepare(), instead of in set_default_headers. 
+ """ if self.allow_origin: self.set_header("Access-Control-Allow-Origin", self.allow_origin) elif self.allow_origin_pat: @@ -326,7 +405,7 @@ def set_default_headers(self): if self.allow_credentials: self.set_header("Access-Control-Allow-Credentials", "true") - def set_attachment_header(self, filename): + def set_attachment_header(self, filename: str) -> None: """Set Content-Disposition: attachment header As a method to ensure handling of filename encoding @@ -334,13 +413,10 @@ def set_attachment_header(self, filename): escaped_filename = url_escape(filename) self.set_header( "Content-Disposition", - "attachment;" - " filename*=utf-8''{utf8}".format( - utf8=escaped_filename, - ), + f"attachment; filename*=utf-8''{escaped_filename}", ) - def get_origin(self): + def get_origin(self) -> str | None: # Handle WebSocket Origin naming convention differences # The difference between version 8 and 13 is that in 8 the # client sends a "Sec-Websocket-Origin" header and in 13 it's @@ -353,7 +429,7 @@ def get_origin(self): # origin_to_satisfy_tornado is present because tornado requires # check_origin to take an origin argument, but we don't use it - def check_origin(self, origin_to_satisfy_tornado=""): + def check_origin(self, origin_to_satisfy_tornado: str = "") -> bool: """Check Origin for cross-site API requests, including websockets Copied from WebSocket with changes: @@ -385,7 +461,7 @@ def check_origin(self, origin_to_satisfy_tornado=""): # Check CORS headers if self.allow_origin: - allow = self.allow_origin == origin + allow = bool(self.allow_origin == origin) elif self.allow_origin_pat: allow = bool(re.match(self.allow_origin_pat, origin)) else: @@ -400,7 +476,7 @@ def check_origin(self, origin_to_satisfy_tornado=""): ) return allow - def check_referer(self): + def check_referer(self) -> bool: """Check Referer for cross-site requests. Disables requests to certain endpoints with external or missing Referer. 
@@ -446,15 +522,18 @@ def check_referer(self): ) return allow - def check_xsrf_cookie(self): + def check_xsrf_cookie(self) -> None: """Bypass xsrf cookie checks when token-authenticated""" + if not hasattr(self, "_jupyter_current_user"): + # Called too early, will be checked later + return None if self.token_authenticated or self.settings.get("disable_check_xsrf", False): # Token-authenticated requests do not need additional XSRF-check # Servers without authentication are vulnerable to XSRF - return + return None try: return super().check_xsrf_cookie() - except web.HTTPError: + except web.HTTPError as e: if self.request.method in {"GET", "HEAD"}: # Consider Referer a sufficient cross-origin check for GET requests if not self.check_referer(): @@ -463,11 +542,11 @@ def check_xsrf_cookie(self): msg = f"Blocking Cross Origin request from {referer}." else: msg = "Blocking request from unknown origin" - raise web.HTTPError(403, msg) + raise web.HTTPError(403, msg) from e else: raise - def check_host(self): + def check_host(self) -> bool: """Check the host header if remote access disallowed. Returns True if the request should continue, False otherwise. @@ -476,7 +555,9 @@ def check_host(self): return True # Remove port (e.g. ':8888') from host - host = re.match(r"^(.*?)(:\d+)?$", self.request.host).group(1) + match = re.match(r"^(.*?)(:\d+)?$", self.request.host) + assert match is not None + host = match.group(1) # Browsers format IPv6 addresses like [::1]; we need to remove the [] if host.startswith("[") and host.endswith("]"): @@ -507,9 +588,47 @@ def check_host(self): ) return allow - def prepare(self): + async def prepare(self) -> Awaitable[None] | None: # type:ignore[override] + """Prepare a response.""" + # Set the current Jupyter Handler context variable. 
+ CallContext.set(CallContext.JUPYTER_HANDLER, self) + if not self.check_host(): + self.current_user = self._jupyter_current_user = None raise web.HTTPError(403) + + from jupyter_server.auth import IdentityProvider + + mod_obj = inspect.getmodule(self.get_current_user) + assert mod_obj is not None + user: User | None = None + + if type(self.identity_provider) is IdentityProvider and mod_obj.__name__ != __name__: + # check for overridden get_current_user + default IdentityProvider + # deprecated way to override auth (e.g. JupyterHub < 3.0) + # allow deprecated, overridden get_current_user + warnings.warn( + "Overriding JupyterHandler.get_current_user is deprecated in jupyter-server 2.0." + " Use an IdentityProvider class.", + DeprecationWarning, + stacklevel=1, + ) + user = User(self.get_current_user()) + else: + _user = self.identity_provider.get_user(self) + if isinstance(_user, Awaitable): + # IdentityProvider.get_user _may_ be async + _user = await _user + user = _user + + # self.current_user for tornado's @web.authenticated + # self._jupyter_current_user for backward-compat in deprecated get_current_user calls + # and our own private checks for whether .current_user has been set + self.current_user = self._jupyter_current_user = user + # complete initial steps which require auth to resolve first: + self.set_cors_headers() + if self.request.method not in {"GET", "HEAD", "OPTIONS"}: + self.check_xsrf_cookie() return super().prepare() # --------------------------------------------------------------- @@ -521,19 +640,21 @@ def get_template(self, name): return self.settings["jinja2_env"].get_template(name) def render_template(self, name, **ns): + """Render a template by name.""" ns.update(self.template_namespace) template = self.get_template(name) return template.render(**ns) @property - def template_namespace(self): + def template_namespace(self) -> dict[str, Any]: return dict( base_url=self.base_url, default_url=self.default_url, ws_url=self.ws_url, 
logged_in=self.logged_in, - allow_password_change=self.settings.get("allow_password_change"), - login_available=self.login_available, + allow_password_change=getattr(self.identity_provider, "allow_password_change", False), + auth_enabled=self.identity_provider.auth_enabled, + login_available=self.identity_provider.login_available, token_available=bool(self.token), static_url=self.static_url, sys_info=json_sys_info(), @@ -548,7 +669,7 @@ def template_namespace(self): **self.jinja_template_vars, ) - def get_json_body(self): + def get_json_body(self) -> dict[str, Any] | None: """Return the body of the request as JSON data.""" if not self.request.body: return None @@ -560,14 +681,14 @@ def get_json_body(self): self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) raise web.HTTPError(400, "Invalid JSON in body of request") from e - return model + return cast("dict[str, Any]", model) - def write_error(self, status_code, **kwargs): + def write_error(self, status_code: int, **kwargs: Any) -> None: """render custom error pages""" exc_info = kwargs.get("exc_info") message = "" status_message = responses.get(status_code, "Unknown HTTP Error") - exception = "(unknown)" + if exc_info: exception = exc_info[1] # get the custom message, if defined @@ -580,14 +701,16 @@ def write_error(self, status_code, **kwargs): reason = getattr(exception, "reason", "") if reason: status_message = reason + else: + exception = "(unknown)" # build template namespace - ns = dict( - status_code=status_code, - status_message=status_message, - message=message, - exception=exception, - ) + ns = { + "status_code": status_code, + "status_message": status_message, + "message": message, + "exception": exception, + } self.set_header("Content-Type", "text/html") # render the template @@ -602,16 +725,17 @@ def write_error(self, status_code, **kwargs): class APIHandler(JupyterHandler): """Base class for API handlers""" - def prepare(self): + async def prepare(self) -> None: 
+ """Prepare an API response.""" + await super().prepare() if not self.check_origin(): raise web.HTTPError(404) - return super().prepare() - def write_error(self, status_code, **kwargs): + def write_error(self, status_code: int, **kwargs: Any) -> None: """APIHandler errors are JSON, not human pages""" self.set_header("Content-Type", "application/json") message = responses.get(status_code, "Unknown HTTP Error") - reply = { + reply: dict[str, Any] = { "message": message, } exc_info = kwargs.get("exc_info") @@ -623,19 +747,14 @@ def write_error(self, status_code, **kwargs): else: reply["message"] = "Unhandled error" reply["reason"] = None - reply["traceback"] = "".join(traceback.format_exception(*exc_info)) - self.log.warning(reply["message"]) + # backward-compatibility: traceback field is present, + # but always empty + reply["traceback"] = "" + self.log.warning("wrote error: %r", reply["message"], exc_info=True) self.finish(json.dumps(reply)) - def get_current_user(self): - """Raise 403 on API handlers instead of redirecting to human login page""" - # preserve _user_cache so we don't raise more than once - if hasattr(self, "_user_cache"): - return self._user_cache - self._user_cache = user = super().get_current_user() - return user - - def get_login_url(self): + def get_login_url(self) -> str: + """Get the login url.""" # if get_login_url is invoked in an API handler, # that means @web.authenticated is trying to trigger a redirect. # instead of redirecting, raise 403 instead. 
@@ -644,7 +763,7 @@ def get_login_url(self): return super().get_login_url() @property - def content_security_policy(self): + def content_security_policy(self) -> str: csp = "; ".join( [ super().content_security_policy, @@ -656,22 +775,26 @@ def content_security_policy(self): # set _track_activity = False on API handlers that shouldn't track activity _track_activity = True - def update_api_activity(self): + def update_api_activity(self) -> None: """Update last_activity of API requests""" # record activity of authenticated requests if ( self._track_activity - and getattr(self, "_user_cache", None) + and getattr(self, "_jupyter_current_user", None) and self.get_argument("no_track_activity", None) is None ): self.settings["api_last_activity"] = utcnow() - def finish(self, *args, **kwargs): + def finish(self, *args: Any, **kwargs: Any) -> Future[Any]: + """Finish an API response.""" self.update_api_activity() - self.set_header("Content-Type", "application/json") + # Allow caller to indicate content-type... 
+ set_content_type = kwargs.pop("set_content_type", "application/json") + self.set_header("Content-Type", set_content_type) return super().finish(*args, **kwargs) - def options(self, *args, **kwargs): + def options(self, *args: Any, **kwargs: Any) -> None: + """Get the options.""" if "Access-Control-Allow-Headers" in self.settings.get("headers", {}): self.set_header( "Access-Control-Allow-Headers", @@ -713,33 +836,46 @@ def options(self, *args, **kwargs): class Template404(JupyterHandler): """Render our 404 template""" - def prepare(self): + async def prepare(self) -> None: + """Prepare a 404 response.""" + await super().prepare() raise web.HTTPError(404) class AuthenticatedFileHandler(JupyterHandler, web.StaticFileHandler): """static files should only be accessible when logged in""" + auth_resource = "contents" + @property - def content_security_policy(self): + def content_security_policy(self) -> str: # In case we're serving HTML/SVG, confine any Javascript to a unique # origin so it can't interact with the Jupyter server. 
return super().content_security_policy + "; sandbox allow-scripts" @web.authenticated - def head(self, path): + @authorized + def head(self, path: str) -> Awaitable[None]: # type:ignore[override] + """Get the head response for a path.""" self.check_xsrf_cookie() return super().head(path) @web.authenticated - def get(self, path): - if os.path.splitext(path)[1] == ".ipynb" or self.get_argument("download", False): + @authorized + def get( # type:ignore[override] + self, path: str, **kwargs: Any + ) -> Awaitable[None]: + """Get a file by path.""" + self.check_xsrf_cookie() + if os.path.splitext(path)[1] == ".ipynb" or self.get_argument("download", None): name = path.rsplit("/", 1)[-1] self.set_attachment_header(name) - return web.StaticFileHandler.get(self, path) + return web.StaticFileHandler.get(self, path, **kwargs) - def get_content_type(self): + def get_content_type(self) -> str: + """Get the content type.""" + assert self.absolute_path is not None path = self.absolute_path.strip("/") if "/" in path: _, name = path.rsplit("/", 1) @@ -754,16 +890,18 @@ def get_content_type(self): else: return super().get_content_type() - def set_headers(self): + def set_headers(self) -> None: + """Set the headers.""" super().set_headers() # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments: self.add_header("Cache-Control", "no-cache") - def compute_etag(self): + def compute_etag(self) -> str | None: + """Compute the etag.""" return None - def validate_absolute_path(self, root, absolute_path): + def validate_absolute_path(self, root: str, absolute_path: str) -> str: """Validate and return the absolute path. 
Requires tornado 3.1 @@ -772,7 +910,8 @@ def validate_absolute_path(self, root, absolute_path): """ abs_path = super().validate_absolute_path(root, absolute_path) abs_root = os.path.abspath(root) - if is_hidden(abs_path, abs_root) and not self.contents_manager.allow_hidden: + assert abs_path is not None + if not self.contents_manager.allow_hidden and is_hidden(abs_path, abs_root): self.log.info( "Refusing to serve hidden file, via 404 Error, use flag 'ContentsManager.allow_hidden' to enable" ) @@ -780,7 +919,7 @@ def validate_absolute_path(self, root, absolute_path): return abs_path -def json_errors(method): +def json_errors(method: Any) -> Any: # pragma: no cover """Decorate methods with this to return GitHub style JSON errors. This should be used on any JSON API on any handler method that can raise HTTPErrors. @@ -815,33 +954,55 @@ def wrapper(self, *args, **kwargs): class FileFindHandler(JupyterHandler, web.StaticFileHandler): - """subclass of StaticFileHandler for serving files from a search path""" + """subclass of StaticFileHandler for serving files from a search path + + The setting "static_immutable_cache" can be set up to serve some static + file as immutable (e.g. file name containing a hash). The setting is a + list of base URL, every static file URL starting with one of those will + be immutable. 
+ """ # cache search results, don't search for files more than once - _static_paths = {} + _static_paths: dict[str, str] = {} + root: tuple[str] # type:ignore[assignment] - def set_headers(self): + def set_headers(self) -> None: + """Set the headers.""" super().set_headers() + + immutable_paths = self.settings.get("static_immutable_cache", []) + + # allow immutable cache for files + if any(self.request.path.startswith(path) for path in immutable_paths): + self.set_header("Cache-Control", "public, max-age=31536000, immutable") + # disable browser caching, rely on 304 replies for savings - if "v" not in self.request.arguments or any( + elif "v" not in self.request.arguments or any( self.request.path.startswith(path) for path in self.no_cache_paths ): self.set_header("Cache-Control", "no-cache") - def initialize(self, path, default_filename=None, no_cache_paths=None): + def initialize( + self, + path: str | list[str], + default_filename: str | None = None, + no_cache_paths: list[str] | None = None, + ) -> None: + """Initialize the file find handler.""" self.no_cache_paths = no_cache_paths or [] if isinstance(path, str): path = [path] - self.root = tuple(os.path.abspath(os.path.expanduser(p)) + os.sep for p in path) + self.root = tuple(os.path.abspath(os.path.expanduser(p)) + os.sep for p in path) # type:ignore[assignment] self.default_filename = default_filename - def compute_etag(self): + def compute_etag(self) -> str | None: + """Compute the etag.""" return None @classmethod - def get_absolute_path(cls, roots, path): + def get_absolute_path(cls, roots: Sequence[str], path: str) -> str: """locate a file to serve on our static file search path""" with cls._lock: if path in cls._static_paths: @@ -857,9 +1018,9 @@ def get_absolute_path(cls, roots, path): log().debug(f"Path {path} served from {abspath}") return abspath - def validate_absolute_path(self, root, absolute_path): + def validate_absolute_path(self, root: str, absolute_path: str) -> str | None: """check if the 
file should be served (raises 404, 403, etc.)""" - if absolute_path == "": + if not absolute_path: raise web.HTTPError(404) for root in self.root: @@ -870,7 +1031,12 @@ def validate_absolute_path(self, root, absolute_path): class APIVersionHandler(APIHandler): - def get(self): + """An API handler for the server version.""" + + _track_activity = False + + def get(self) -> None: + """Get the server version info.""" # not authenticated, so give as few info as possible self.finish(json.dumps({"version": jupyter_server.__version__})) @@ -881,7 +1047,9 @@ class TrailingSlashHandler(web.RequestHandler): This should be the first, highest priority handler. """ - def get(self): + def get(self) -> None: + """Handle trailing slashes in a get.""" + assert self.request.uri is not None path, *rest = self.request.uri.partition("?") # trim trailing *and* leading / # to avoid misinterpreting repeated '//' @@ -895,7 +1063,8 @@ def get(self): class MainHandler(JupyterHandler): """Simple handler for base_url.""" - def get(self): + def get(self) -> None: + """Get the main template.""" html = self.render_template("main.html") self.write(html) @@ -906,7 +1075,7 @@ class FilesRedirectHandler(JupyterHandler): """Handler for redirecting relative URLs to the /files/ handler""" @staticmethod - async def redirect_to_files(self, path): + async def redirect_to_files(self: Any, path: str) -> None: """make redirect logic a reusable static method so it can be called from other handlers. 
@@ -934,18 +1103,20 @@ async def redirect_to_files(self, path): self.log.debug("Redirecting %s to %s", self.request.path, url) self.redirect(url) - def get(self, path=""): - return self.redirect_to_files(self, path) + async def get(self, path: str = "") -> None: + return await self.redirect_to_files(self, path) class RedirectWithParams(web.RequestHandler): """Sam as web.RedirectHandler, but preserves URL parameters""" - def initialize(self, url, permanent=True): + def initialize(self, url: str, permanent: bool = True) -> None: + """Initialize a redirect handler.""" self._url = url self._permanent = permanent - def get(self): + def get(self) -> None: + """Get a redirect.""" sep = "&" if "?" in self._url else "?" url = sep.join([self._url, self.request.query]) self.redirect(url, permanent=self._permanent) @@ -953,10 +1124,11 @@ def get(self): class PrometheusMetricsHandler(JupyterHandler): """ - Return prometheus metrics for this notebook server + Return prometheus metrics for this server """ - def get(self): + def get(self) -> None: + """Get prometheus metrics.""" if self.settings["authenticate_prometheus"] and not self.logged_in: raise web.HTTPError(403) @@ -965,7 +1137,7 @@ def get(self): # ----------------------------------------------------------------------------- -# URL pattern fragments for re-use +# URL pattern fragments for reuse # ----------------------------------------------------------------------------- # path matches any number of `/foo[/bar...]` or just `/` or '' diff --git a/jupyter_server/base/websocket.py b/jupyter_server/base/websocket.py new file mode 100644 index 0000000000..a27b7a72a7 --- /dev/null +++ b/jupyter_server/base/websocket.py @@ -0,0 +1,128 @@ +"""Base websocket classes.""" +import re +from typing import Optional, no_type_check +from urllib.parse import urlparse + +from tornado import ioloop +from tornado.iostream import IOStream + +# ping interval for keeping websockets alive (30 seconds) +WS_PING_INTERVAL = 30000 + + +class 
WebSocketMixin: + """Mixin for common websocket options""" + + ping_callback = None + last_ping = 0.0 + last_pong = 0.0 + stream: Optional[IOStream] = None + + @property + def ping_interval(self): + """The interval for websocket keep-alive pings. + + Set ws_ping_interval = 0 to disable pings. + """ + return self.settings.get("ws_ping_interval", WS_PING_INTERVAL) # type:ignore[attr-defined] + + @property + def ping_timeout(self): + """If no ping is received in this many milliseconds, + close the websocket connection (VPNs, etc. can fail to cleanly close ws connections). + Default is max of 3 pings or 30 seconds. + """ + return self.settings.get( # type:ignore[attr-defined] + "ws_ping_timeout", max(3 * self.ping_interval, WS_PING_INTERVAL) + ) + + @no_type_check + def check_origin(self, origin: Optional[str] = None) -> bool: + """Check Origin == Host or Access-Control-Allow-Origin. + + Tornado >= 4 calls this method automatically, raising 403 if it returns False. + """ + + if self.allow_origin == "*" or ( + hasattr(self, "skip_check_origin") and self.skip_check_origin() + ): + return True + + host = self.request.headers.get("Host") + if origin is None: + origin = self.get_origin() + + # If no origin or host header is provided, assume from script + if origin is None or host is None: + return True + + origin = origin.lower() + origin_host = urlparse(origin).netloc + + # OK if origin matches host + if origin_host == host: + return True + + # Check CORS headers + if self.allow_origin: + allow = self.allow_origin == origin + elif self.allow_origin_pat: + allow = bool(re.match(self.allow_origin_pat, origin)) + else: + # No CORS headers deny the request + allow = False + if not allow: + self.log.warning( + "Blocking Cross Origin WebSocket Attempt. 
Origin: %s, Host: %s", + origin, + host, + ) + return allow + + def clear_cookie(self, *args, **kwargs): + """meaningless for websockets""" + + @no_type_check + def open(self, *args, **kwargs): + """Open the websocket.""" + self.log.debug("Opening websocket %s", self.request.path) + + # start the pinging + if self.ping_interval > 0: + loop = ioloop.IOLoop.current() + self.last_ping = loop.time() # Remember time of last ping + self.last_pong = self.last_ping + self.ping_callback = ioloop.PeriodicCallback( + self.send_ping, + self.ping_interval, + ) + self.ping_callback.start() + return super().open(*args, **kwargs) + + @no_type_check + def send_ping(self): + """send a ping to keep the websocket alive""" + if self.ws_connection is None and self.ping_callback is not None: + self.ping_callback.stop() + return + + if self.ws_connection.client_terminated: + self.close() + return + + # check for timeout on pong. Make sure that we really have sent a recent ping in + # case the machine with both server and client has been suspended since the last ping. + now = ioloop.IOLoop.current().time() + since_last_pong = 1e3 * (now - self.last_pong) + since_last_ping = 1e3 * (now - self.last_ping) + if since_last_ping < 2 * self.ping_interval and since_last_pong > self.ping_timeout: + self.log.warning("WebSocket ping timeout after %i ms.", since_last_pong) + self.close() + return + + self.ping(b"") + self.last_ping = now + + def on_pong(self, data): + """Handle a pong message.""" + self.last_pong = ioloop.IOLoop.current().time() diff --git a/jupyter_server/base/zmqhandlers.py b/jupyter_server/base/zmqhandlers.py index 28e296c722..4490380a34 100644 --- a/jupyter_server/base/zmqhandlers.py +++ b/jupyter_server/base/zmqhandlers.py @@ -1,348 +1,19 @@ -"""Tornado handlers for WebSocket <-> ZMQ sockets.""" -# Copyright (c) Jupyter Development Team. -# Distributed under the terms of the Modified BSD License. 
-import json -import re -import struct -import sys -from urllib.parse import urlparse +"""This module is deprecated in Jupyter Server 2.0""" +# Raise a warning that this module is deprecated. +import warnings -import tornado - -try: - from jupyter_client.jsonutil import json_default -except ImportError: - from jupyter_client.jsonutil import date_default as json_default - -from jupyter_client.jsonutil import extract_dates -from jupyter_client.session import Session -from tornado import ioloop, web from tornado.websocket import WebSocketHandler -from jupyter_server.auth.utils import warn_disabled_authorization - -from .handlers import JupyterHandler - - -def serialize_binary_message(msg): - """serialize a message as a binary blob - - Header: - - 4 bytes: number of msg parts (nbufs) as 32b int - 4 * nbufs bytes: offset for each buffer as integer as 32b int - - Offsets are from the start of the buffer, including the header. - - Returns - ------- - The message serialized to bytes. - - """ - # don't modify msg or buffer list in-place - msg = msg.copy() - buffers = list(msg.pop("buffers")) - if sys.version_info < (3, 4): - buffers = [x.tobytes() for x in buffers] - bmsg = json.dumps(msg, default=json_default).encode("utf8") - buffers.insert(0, bmsg) - nbufs = len(buffers) - offsets = [4 * (nbufs + 1)] - for buf in buffers[:-1]: - offsets.append(offsets[-1] + len(buf)) - offsets_buf = struct.pack("!" + "I" * (nbufs + 1), nbufs, *offsets) - buffers.insert(0, offsets_buf) - return b"".join(buffers) - - -def deserialize_binary_message(bmsg): - """deserialize a message from a binary blog - - Header: - - 4 bytes: number of msg parts (nbufs) as 32b int - 4 * nbufs bytes: offset for each buffer as integer as 32b int - - Offsets are from the start of the buffer, including the header. - - Returns - ------- - message dictionary - """ - nbufs = struct.unpack("!i", bmsg[:4])[0] - offsets = list(struct.unpack("!" 
+ "I" * nbufs, bmsg[4 : 4 * (nbufs + 1)])) - offsets.append(None) - bufs = [] - for start, stop in zip(offsets[:-1], offsets[1:]): - bufs.append(bmsg[start:stop]) - msg = json.loads(bufs[0].decode("utf8")) - msg["header"] = extract_dates(msg["header"]) - msg["parent_header"] = extract_dates(msg["parent_header"]) - msg["buffers"] = bufs[1:] - return msg - - -def serialize_msg_to_ws_v1(msg_or_list, channel, pack=None): - if pack: - msg_list = [ - pack(msg_or_list["header"]), - pack(msg_or_list["parent_header"]), - pack(msg_or_list["metadata"]), - pack(msg_or_list["content"]), - ] - else: - msg_list = msg_or_list - channel = channel.encode("utf-8") - offsets = [] - offsets.append(8 * (1 + 1 + len(msg_list) + 1)) - offsets.append(len(channel) + offsets[-1]) - for msg in msg_list: - offsets.append(len(msg) + offsets[-1]) - offset_number = len(offsets).to_bytes(8, byteorder="little") - offsets = [offset.to_bytes(8, byteorder="little") for offset in offsets] - bin_msg = b"".join([offset_number] + offsets + [channel] + msg_list) - return bin_msg - - -def deserialize_msg_from_ws_v1(ws_msg): - offset_number = int.from_bytes(ws_msg[:8], "little") - offsets = [ - int.from_bytes(ws_msg[8 * (i + 1) : 8 * (i + 2)], "little") for i in range(offset_number) - ] - channel = ws_msg[offsets[0] : offsets[1]].decode("utf-8") - msg_list = [ws_msg[offsets[i] : offsets[i + 1]] for i in range(1, offset_number - 1)] - return channel, msg_list - - -# ping interval for keeping websockets alive (30 seconds) -WS_PING_INTERVAL = 30000 - - -class WebSocketMixin: - """Mixin for common websocket options""" - - ping_callback = None - last_ping = 0 - last_pong = 0 - stream = None - - @property - def ping_interval(self): - """The interval for websocket keep-alive pings. - - Set ws_ping_interval = 0 to disable pings. 
- """ - return self.settings.get("ws_ping_interval", WS_PING_INTERVAL) - - @property - def ping_timeout(self): - """If no ping is received in this many milliseconds, - close the websocket connection (VPNs, etc. can fail to cleanly close ws connections). - Default is max of 3 pings or 30 seconds. - """ - return self.settings.get("ws_ping_timeout", max(3 * self.ping_interval, WS_PING_INTERVAL)) - - def check_origin(self, origin=None): - """Check Origin == Host or Access-Control-Allow-Origin. - - Tornado >= 4 calls this method automatically, raising 403 if it returns False. - """ - - if self.allow_origin == "*" or ( - hasattr(self, "skip_check_origin") and self.skip_check_origin() - ): - return True - - host = self.request.headers.get("Host") - if origin is None: - origin = self.get_origin() - - # If no origin or host header is provided, assume from script - if origin is None or host is None: - return True - - origin = origin.lower() - origin_host = urlparse(origin).netloc - - # OK if origin matches host - if origin_host == host: - return True - - # Check CORS headers - if self.allow_origin: - allow = self.allow_origin == origin - elif self.allow_origin_pat: - allow = bool(re.match(self.allow_origin_pat, origin)) - else: - # No CORS headers deny the request - allow = False - if not allow: - self.log.warning( - "Blocking Cross Origin WebSocket Attempt. 
Origin: %s, Host: %s", - origin, - host, - ) - return allow - - def clear_cookie(self, *args, **kwargs): - """meaningless for websockets""" - pass - - def open(self, *args, **kwargs): - self.log.debug("Opening websocket %s", self.request.path) - - # start the pinging - if self.ping_interval > 0: - loop = ioloop.IOLoop.current() - self.last_ping = loop.time() # Remember time of last ping - self.last_pong = self.last_ping - self.ping_callback = ioloop.PeriodicCallback( - self.send_ping, - self.ping_interval, - ) - self.ping_callback.start() - return super().open(*args, **kwargs) - - def send_ping(self): - """send a ping to keep the websocket alive""" - if self.ws_connection is None and self.ping_callback is not None: - self.ping_callback.stop() - return - - if self.ws_connection.client_terminated: - self.close() - return - - # check for timeout on pong. Make sure that we really have sent a recent ping in - # case the machine with both server and client has been suspended since the last ping. - now = ioloop.IOLoop.current().time() - since_last_pong = 1e3 * (now - self.last_pong) - since_last_ping = 1e3 * (now - self.last_ping) - if since_last_ping < 2 * self.ping_interval and since_last_pong > self.ping_timeout: - self.log.warning("WebSocket ping timeout after %i ms.", since_last_pong) - self.close() - return - - self.ping(b"") - self.last_ping = now - - def on_pong(self, data): - self.last_pong = ioloop.IOLoop.current().time() - - -class ZMQStreamHandler(WebSocketMixin, WebSocketHandler): - - if tornado.version_info < (4, 1): - """Backport send_error from tornado 4.1 to 4.0""" - - def send_error(self, *args, **kwargs): - if self.stream is None: - super(WebSocketHandler, self).send_error(*args, **kwargs) - else: - # If we get an uncaught exception during the handshake, - # we have no choice but to abruptly close the connection. - # TODO: for uncaught exceptions after the handshake, - # we can close the connection more gracefully. 
- self.stream.close() - - def _reserialize_reply(self, msg_or_list, channel=None): - """Reserialize a reply message using JSON. - - msg_or_list can be an already-deserialized msg dict or the zmq buffer list. - If it is the zmq list, it will be deserialized with self.session. - - This takes the msg list from the ZMQ socket and serializes the result for the websocket. - This method should be used by self._on_zmq_reply to build messages that can - be sent back to the browser. - - """ - if isinstance(msg_or_list, dict): - # already unpacked - msg = msg_or_list - else: - idents, msg_list = self.session.feed_identities(msg_or_list) - msg = self.session.deserialize(msg_list) - if channel: - msg["channel"] = channel - if msg["buffers"]: - buf = serialize_binary_message(msg) - return buf - else: - return json.dumps(msg, default=json_default) - - def select_subprotocol(self, subprotocols): - preferred_protocol = self.settings.get("kernel_ws_protocol") - if preferred_protocol is None: - preferred_protocol = "v1.kernel.websocket.jupyter.org" - elif preferred_protocol == "": - preferred_protocol = None - selected_subprotocol = preferred_protocol if preferred_protocol in subprotocols else None - # None is the default, "legacy" protocol - return selected_subprotocol - - def _on_zmq_reply(self, stream, msg_list): - # Sometimes this gets triggered when the on_close method is scheduled in the - # eventloop but hasn't been called. 
- if self.ws_connection is None or stream.closed(): - self.log.warning("zmq message arrived on closed channel") - self.close() - return - channel = getattr(stream, "channel", None) - if self.selected_subprotocol == "v1.kernel.websocket.jupyter.org": - bin_msg = serialize_msg_to_ws_v1(msg_list, channel) - self.write_message(bin_msg, binary=True) - else: - try: - msg = self._reserialize_reply(msg_list, channel=channel) - except Exception: - self.log.critical("Malformed message: %r" % msg_list, exc_info=True) - else: - self.write_message(msg, binary=isinstance(msg, bytes)) - - -class AuthenticatedZMQStreamHandler(ZMQStreamHandler, JupyterHandler): - def set_default_headers(self): - """Undo the set_default_headers in JupyterHandler - - which doesn't make sense for websockets - """ - pass - - def pre_get(self): - """Run before finishing the GET request - - Extend this method to add logic that should fire before - the websocket finishes completing. - """ - # authenticate the request before opening the websocket - user = self.get_current_user() - if user is None: - self.log.warning("Couldn't authenticate WebSocket connection") - raise web.HTTPError(403) - - # authorize the user. - if not self.authorizer: - # Warn if there is not authorizer. 
- warn_disabled_authorization() - elif not self.authorizer.is_authorized(self, user, "execute", "kernels"): - raise web.HTTPError(403) - - if self.get_argument("session_id", False): - self.session.session = self.get_argument("session_id") - else: - self.log.warning("No session ID specified") - - async def get(self, *args, **kwargs): - # pre_get can be a coroutine in subclasses - # assign and yield in two step to avoid tornado 3 issues - res = self.pre_get() - await res - res = super().get(*args, **kwargs) - await res - - def initialize(self): - self.log.debug("Initializing websocket connection %s", self.request.path) - self.session = Session(config=self.config) - - def get_compression_options(self): - return self.settings.get("websocket_compression_options", None) +from jupyter_server.base.websocket import WebSocketMixin +from jupyter_server.services.kernels.connection.base import ( + deserialize_binary_message, + deserialize_msg_from_ws_v1, + serialize_binary_message, + serialize_msg_to_ws_v1, +) + +warnings.warn( + "jupyter_server.base.zmqhandlers module is deprecated in Jupyter Server 2.0", + DeprecationWarning, + stacklevel=2, +) diff --git a/jupyter_server/config_manager.py b/jupyter_server/config_manager.py index 25c5efd28f..87480d7609 100644 --- a/jupyter_server/config_manager.py +++ b/jupyter_server/config_manager.py @@ -1,17 +1,22 @@ """Manager to read and modify config data in JSON files.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from __future__ import annotations + import copy import errno import glob import json import os +import typing as t from traitlets.config import LoggingConfigurable from traitlets.traitlets import Bool, Unicode +StrDict = t.Dict[str, t.Any] + -def recursive_update(target, new): +def recursive_update(target: StrDict, new: StrDict) -> None: """Recursively update one dictionary using another. None values will delete their keys. 
@@ -32,7 +37,7 @@ def recursive_update(target, new): target[k] = v -def remove_defaults(data, defaults): +def remove_defaults(data: StrDict, defaults: StrDict) -> None: """Recursively remove items from dict that are already in defaults""" # copy the iterator, since data will be modified for key, value in list(data.items()): @@ -41,9 +46,8 @@ def remove_defaults(data, defaults): remove_defaults(data[key], defaults[key]) if not data[key]: # prune empty subdicts del data[key] - else: - if value == defaults[key]: - del data[key] + elif value == defaults[key]: + del data[key] class BaseJSONConfigManager(LoggingConfigurable): @@ -56,7 +60,7 @@ class BaseJSONConfigManager(LoggingConfigurable): config_dir = Unicode(".") read_directory = Bool(True) - def ensure_config_dir_exists(self): + def ensure_config_dir_exists(self) -> None: """Will try to create the config_dir directory.""" try: os.makedirs(self.config_dir, 0o755) @@ -64,15 +68,15 @@ def ensure_config_dir_exists(self): if e.errno != errno.EEXIST: raise - def file_name(self, section_name): + def file_name(self, section_name: str) -> str: """Returns the json filename for the section_name: {config_dir}/{section_name}.json""" return os.path.join(self.config_dir, section_name + ".json") - def directory(self, section_name): + def directory(self, section_name: str) -> str: """Returns the directory name for the section name: {config_dir}/{section_name}.d""" return os.path.join(self.config_dir, section_name + ".d") - def get(self, section_name, include_root=True): + def get(self, section_name: str, include_root: bool = True) -> dict[str, t.Any]: """Retrieve the config data for the specified section. 
Returns the data as a dictionary, or an empty dictionary if the file @@ -95,14 +99,14 @@ def get(self, section_name, include_root=True): section_name, "\n\t".join(paths), ) - data = {} + data: dict[str, t.Any] = {} for path in paths: if os.path.isfile(path): with open(path, encoding="utf-8") as f: recursive_update(data, json.load(f)) return data - def set(self, section_name, data): + def set(self, section_name: str, data: t.Any) -> None: """Store the given config data.""" filename = self.file_name(section_name) self.ensure_config_dir_exists() @@ -116,11 +120,10 @@ def set(self, section_name, data): # Generate the JSON up front, since it could raise an exception, # in order to avoid writing half-finished corrupted data to disk. json_content = json.dumps(data, indent=2) - f = open(filename, "w", encoding="utf-8") - with f: + with open(filename, "w", encoding="utf-8") as f: f.write(json_content) - def update(self, section_name, new_data): + def update(self, section_name: str, new_data: t.Any) -> dict[str, t.Any]: """Modify the config section by recursively updating it with new_data. Returns the modified config data as a dictionary. diff --git a/jupyter_server/event_schemas/contents_service/v1.yaml b/jupyter_server/event_schemas/contents_service/v1.yaml new file mode 100644 index 0000000000..a787f9b2b0 --- /dev/null +++ b/jupyter_server/event_schemas/contents_service/v1.yaml @@ -0,0 +1,73 @@ +"$id": https://events.jupyter.org/jupyter_server/contents_service/v1 +version: 1 +title: Contents Manager activities +personal-data: true +description: | + Record actions on files via the ContentsManager. + + The notebook ContentsManager REST API is used by all frontends to retrieve, + save, list, delete and perform other actions on notebooks, directories, + and other files through the UI. This is pluggable - the default acts on + the file system, but can be replaced with a different ContentsManager + implementation - to work on S3, Postgres, other object stores, etc. 
+ The events get recorded regardless of the ContentsManager implementation + being used. + + Limitations: + + 1. This does not record all filesystem access, just the ones that happen + explicitly via the notebook server's REST API. Users can (and often do) + trivially access the filesystem in many other ways (such as `open()` calls + in their code), so this is usually never a complete record. + 2. As with all events recorded by the notebook server, users most likely + have the ability to modify the code of the notebook server. Unless other + security measures are in place, these events should be treated as user + controlled and not used in high security areas. + 3. Events are only recorded when an action succeeds. +type: object +required: + - action + - path +properties: + action: + enum: + - get + - create + - save + - upload + - rename + - copy + - delete + description: | + Action performed by the ContentsManager API. + + This is a required field. + + Possible values: + + 1. get + Get contents of a particular file, or list contents of a directory. + + 2. save + Save a file at path with contents from the client + + 3. rename + Rename a file or directory from value in source_path to + value in path. + + 4. copy + Copy a file or directory from value in source_path to + value in path. + + 5. delete + Delete a file or empty directory at given path + path: + type: string + description: | + Logical path on which the operation was performed. + + This is a required field. + source_path: + type: string + description: | + Source path of an operation when action is 'copy' or 'rename' diff --git a/jupyter_server/event_schemas/gateway_client/v1.yaml b/jupyter_server/event_schemas/gateway_client/v1.yaml new file mode 100644 index 0000000000..0a35d2464d --- /dev/null +++ b/jupyter_server/event_schemas/gateway_client/v1.yaml @@ -0,0 +1,40 @@ +"$id": https://events.jupyter.org/jupyter_server/gateway_client/v1 +version: 1 +title: Gateway Client activities. 
+personal-data: true +description: | + Record events of a gateway client. +type: object +required: + - status + - msg +properties: + status: + enum: + - error + - success + description: | + Status received by Gateway client based on the rest api operation to gateway kernel. + + This is a required field. + + Possible values: + + 1. error + Error response from a rest api operation to gateway kernel. + + 2. success + Success response from a rest api operation to gateway kernel. + status_code: + type: number + description: | + Http response codes from a rest api operation to gateway kernel. + Examples: 200, 400, 502, 503, 599 etc. + msg: + type: string + description: | + Description of the event being emitted. + gateway_url: + type: string + description: | + Gateway url where the remote server exist. diff --git a/jupyter_server/event_schemas/kernel_actions/v1.yaml b/jupyter_server/event_schemas/kernel_actions/v1.yaml new file mode 100644 index 0000000000..e0375e5aaa --- /dev/null +++ b/jupyter_server/event_schemas/kernel_actions/v1.yaml @@ -0,0 +1,80 @@ +"$id": https://events.jupyter.org/jupyter_server/kernel_actions/v1 +version: 1 +title: Kernel Manager activities +personal-data: true +description: | + Record events of a kernel manager. +type: object +required: + - action + - msg +properties: + action: + enum: + - start + - interrupt + - shutdown + - restart + description: | + Action performed by the Kernel Manager. + + This is a required field. + + Possible values: + + 1. start + A kernel has been started with the given kernel id. + + 2. interrupt + A kernel has been interrupted for the given kernel id. + + 3. shutdown + A kernel has been shut down for the given kernel id. + + 4. restart + A kernel has been restarted for the given kernel id. + kernel_id: + type: string + description: | + Kernel id. + + This is a required field for all actions and statuses except action start with status error. + kernel_name: + type: string + description: | + Name of the kernel. 
+ status: + enum: + - error + - success + description: | + Status received from a rest api operation to kernel server. + + This is a required field. + + Possible values: + + 1. error + Error response from a rest api operation to kernel server. + + 2. success + Success response from a rest api operation to kernel server. + status_code: + type: number + description: | + Http response codes from a rest api operation to kernel server. + Examples: 200, 400, 502, 503, 599 etc + msg: + type: string + description: | + Description of the event specified in action. +if: + not: + properties: + status: + const: error + action: + const: start +then: + required: + - kernel_id diff --git a/jupyter_server/extension/application.py b/jupyter_server/extension/application.py index 167f6dd94e..aeeab5a94d 100644 --- a/jupyter_server/extension/application.py +++ b/jupyter_server/extension/application.py @@ -1,6 +1,10 @@ +"""An extension application.""" +from __future__ import annotations + import logging import re import sys +import typing as t from jinja2 import Environment, FileSystemLoader from jupyter_core.application import JupyterApp, NoStart @@ -20,24 +24,24 @@ # ----------------------------------------------------------------------------- -def _preparse_for_subcommand(Application, argv): +def _preparse_for_subcommand(application_klass, argv): """Preparse command line to look for subcommands.""" # Read in arguments from command line. if len(argv) == 0: - return + return None # Find any subcommands. 
- if Application.subcommands and len(argv) > 0: + if application_klass.subcommands and len(argv) > 0: # we have subcommands, and one may have been specified subc, subargv = argv[0], argv[1:] - if re.match(r"^\w(\-?\w)*$", subc) and subc in Application.subcommands: + if re.match(r"^\w(\-?\w)*$", subc) and subc in application_klass.subcommands: # it's a subcommand, and *not* a flag or class parameter - app = Application() + app = application_klass() app.initialize_subcommand(subc, subargv) return app.subapp -def _preparse_for_stopping_flags(Application, argv): +def _preparse_for_stopping_flags(application_klass, argv): """Looks for 'help', 'version', and 'generate-config; commands in command line. If found, raises the help and version of current Application. @@ -57,19 +61,19 @@ def _preparse_for_stopping_flags(Application, argv): # Catch any help calls. if any(x in interpreted_argv for x in ("-h", "--help-all", "--help")): - app = Application() + app = application_klass() app.print_help("--help-all" in interpreted_argv) app.exit(0) # Catch version commands if "--version" in interpreted_argv or "-V" in interpreted_argv: - app = Application() + app = application_klass() app.print_version() app.exit(0) # Catch generate-config commands. if "--generate-config" in interpreted_argv: - app = Application() + app = application_klass() app.write_default_config() app.exit(0) @@ -84,8 +88,9 @@ class ExtensionAppJinjaMixin(HasTraits): ) ).tag(config=True) + @t.no_type_check def _prepare_templates(self): - # Get templates defined in a subclass. + """Get templates defined in a subclass.""" self.initialize_templates() # Add templates to web app settings if extension has templates. if len(self.template_paths) > 0: @@ -121,12 +126,14 @@ class ExtensionApp(JupyterApp): """Base class for configurable Jupyter Server Extension Applications. ExtensionApp subclasses can be initialized two ways: - 1. 
Extension is listed as a jpserver_extension, and ServerApp calls - its load_jupyter_server_extension classmethod. This is the - classic way of loading a server extension. - 2. Extension is launched directly by calling its `launch_instance` - class method. This method can be set as a entry_point in - the extensions setup.py + + - Extension is listed as a jpserver_extension, and ServerApp calls + its load_jupyter_server_extension classmethod. This is the + classic way of loading a server extension. + + - Extension is launched directly by calling its `launch_instance` + class method. This method can be set as a entry_point in + the extensions setup.py. """ # Subclasses should override this trait. Tells the server if @@ -137,7 +144,7 @@ class method. This method can be set as a entry_point in # A useful class property that subclasses can override to # configure the underlying Jupyter Server when this extension # is launched directly (using its `launch_instance` method). - serverapp_config = {} + serverapp_config: dict[str, t.Any] = {} # Some subclasses will likely override this trait to flip # the default value to False if they don't offer a browser @@ -153,22 +160,25 @@ class method. This method can be set as a entry_point in @default("open_browser") def _default_open_browser(self): + assert self.serverapp is not None return self.serverapp.config["ServerApp"].get("open_browser", True) @property def config_file_paths(self): """Look on the same path as our parent for config files""" # rely on parent serverapp, which should control all config loading + assert self.serverapp is not None return self.serverapp.config_file_paths # The extension name used to name the jupyter config # file, jupyter_{name}_config. # This should also match the jupyter subcommand used to launch # this extension from the CLI, e.g. `jupyter {name}`. 
- name = None + name: str | Unicode[str, str] = "ExtensionApp" # type:ignore[assignment] @classmethod def get_extension_package(cls): + """Get an extension package.""" parts = cls.__module__.split(".") if is_namespace_package(parts[0]): # in this case the package name is `.`. @@ -177,6 +187,7 @@ def get_extension_package(cls): @classmethod def get_extension_point(cls): + """Get an extension point.""" return cls.__module__ # Extension URL sets the default landing page for this extension. @@ -199,7 +210,7 @@ def _default_url(self): ] # A ServerApp is not defined yet, but will be initialized below. - serverapp = Any() + serverapp: ServerApp | None = Any() # type:ignore[assignment] @default("serverapp") def _default_serverapp(self): @@ -215,7 +226,7 @@ def _default_serverapp(self): # declare an empty one return ServerApp() - _log_formatter_cls = LogFormatter + _log_formatter_cls = LogFormatter # type:ignore[assignment] @default("log_level") def _default_log_level(self): @@ -235,6 +246,7 @@ def _default_log_format(self): @default("static_url_prefix") def _default_static_url_prefix(self): static_url = f"static/{self.name}/" + assert self.serverapp is not None return url_path_join(self.serverapp.base_url, static_url) static_paths = List( @@ -257,7 +269,9 @@ def _default_static_url_prefix(self): settings = Dict(help=_i18n("""Settings that will passed to the server.""")).tag(config=True) - handlers = List(help=_i18n("""Handlers appended to the server.""")).tag(config=True) + handlers: List[tuple[t.Any, ...]] = List( + help=_i18n("""Handlers appended to the server.""") + ).tag(config=True) def _config_file_name_default(self): """The default config file name.""" @@ -267,15 +281,12 @@ def _config_file_name_default(self): def initialize_settings(self): """Override this method to add handling of settings.""" - pass def initialize_handlers(self): """Override this method to append handlers to a Jupyter Server.""" - pass def initialize_templates(self): """Override this method to 
add handling of template files.""" - pass def _prepare_config(self): """Builds a Config object from the extension's traits and passes @@ -286,7 +297,9 @@ def _prepare_config(self): self.settings[f"{self.name}_config"] = self.extension_config def _prepare_settings(self): + """Prepare the settings.""" # Make webapp settings accessible to initialize_settings method + assert self.serverapp is not None webapp = self.serverapp.web_app self.settings.update(**webapp.settings) @@ -305,6 +318,8 @@ def _prepare_settings(self): webapp.settings.update(**self.settings) def _prepare_handlers(self): + """Prepare the handlers.""" + assert self.serverapp is not None webapp = self.serverapp.web_app # Get handlers defined by extension subclass. @@ -318,7 +333,7 @@ def _prepare_handlers(self): handler = handler_items[1] # Get handler kwargs, if given - kwargs = {} + kwargs: dict[str, t.Any] = {} if issubclass(handler, ExtensionHandlerMixin): kwargs["name"] = self.name @@ -343,15 +358,16 @@ def _prepare_handlers(self): ) new_handlers.append(handler) - webapp.add_handlers(".*$", new_handlers) + webapp.add_handlers(".*$", new_handlers) # type:ignore[arg-type] def _prepare_templates(self): - # Add templates to web app settings if extension has templates. + """Add templates to web app settings if extension has templates.""" if len(self.template_paths) > 0: self.settings.update({f"{self.name}_template_paths": self.template_paths}) self.initialize_templates() def _jupyter_server_config(self): + """The jupyter server config.""" base_config = { "ServerApp": { "default_url": self.default_url, @@ -362,7 +378,7 @@ def _jupyter_server_config(self): base_config["ServerApp"].update(self.serverapp_config) return base_config - def _link_jupyter_server_extension(self, serverapp): + def _link_jupyter_server_extension(self, serverapp: ServerApp) -> None: """Link the ExtensionApp to an initialized ServerApp. 
The ServerApp is stored as an attribute and config @@ -401,11 +417,10 @@ def initialize(self): corresponding server app and webapp should already be initialized by this step. - 1) Appends Handlers to the ServerApp, - 2) Passes config and settings from ExtensionApp - to the Tornado web application - 3) Points Tornado Webapp to templates and - static assets. + - Appends Handlers to the ServerApp, + - Passes config and settings from ExtensionApp + to the Tornado web application + - Points Tornado Webapp to templates and static assets. """ if not self.serverapp: msg = ( @@ -427,13 +442,19 @@ def start(self): """ super().start() # Start the server. + assert self.serverapp is not None self.serverapp.start() + def current_activity(self): + """Return a list of activity happening in this extension.""" + return + async def stop_extension(self): """Cleanup any resources managed by this extension.""" def stop(self): """Stop the underlying Jupyter server.""" + assert self.serverapp is not None self.serverapp.stop() self.serverapp.clear_instance() @@ -534,10 +555,20 @@ def load_classic_server_extension(cls, serverapp): ) extension.initialize() + serverapp_class = ServerApp + + @classmethod + def make_serverapp(cls, **kwargs: t.Any) -> ServerApp: + """Instantiate the ServerApp + + Override to customize the ServerApp before it loads any configuration + """ + return cls.serverapp_class.instance(**kwargs) + @classmethod def initialize_server(cls, argv=None, load_other_extensions=True, **kwargs): """Creates an instance of ServerApp and explicitly sets - this extension to enabled=True (i.e. superceding disabling + this extension to enabled=True (i.e. superseding disabling found in other config from files). 
The `launch_instance` method uses this method to initialize @@ -549,8 +580,8 @@ def initialize_server(cls, argv=None, load_other_extensions=True, **kwargs): jpserver_extensions.update(cls.serverapp_config["jpserver_extensions"]) cls.serverapp_config["jpserver_extensions"] = jpserver_extensions find_extensions = False - serverapp = ServerApp.instance(jpserver_extensions=jpserver_extensions, **kwargs) - serverapp.aliases.update(cls.aliases) + serverapp = cls.make_serverapp(jpserver_extensions=jpserver_extensions, **kwargs) + serverapp.aliases.update(cls.aliases) # type:ignore[has-type] serverapp.initialize( argv=argv or [], starter_extension=cls.name, @@ -565,7 +596,7 @@ def launch_instance(cls, argv=None, **kwargs): extension's landing page. """ # Handle arguments. - if argv is None: + if argv is None: # noqa: SIM108 args = sys.argv[1:] # slice out extension config. else: args = argv @@ -585,10 +616,7 @@ def launch_instance(cls, argv=None, **kwargs): # Log if extension is blocking other extensions from loading. if not cls.load_other_extensions: - serverapp.log.info( - "{ext_name} is running without loading " - "other extensions.".format(ext_name=cls.name) - ) + serverapp.log.info(f"{cls.name} is running without loading other extensions.") # Start the server. 
try: serverapp.start() diff --git a/jupyter_server/extension/config.py b/jupyter_server/extension/config.py index 15a3cfbd0c..47b4f6cce1 100644 --- a/jupyter_server/extension/config.py +++ b/jupyter_server/extension/config.py @@ -1,3 +1,4 @@ +"""Extension config.""" from jupyter_server.services.config.manager import ConfigManager DEFAULT_SECTION_NAME = "jupyter_server_config" @@ -24,9 +25,11 @@ def enabled(self, name, section_name=DEFAULT_SECTION_NAME, include_root=True): return False def enable(self, name): + """Enable an extension by name.""" data = {"ServerApp": {"jpserver_extensions": {name: True}}} self.update(name, data) def disable(self, name): + """Disable an extension by name.""" data = {"ServerApp": {"jpserver_extensions": {name: False}}} self.update(name, data) diff --git a/jupyter_server/extension/handler.py b/jupyter_server/extension/handler.py index 164d74bb15..55f5aff2c3 100644 --- a/jupyter_server/extension/handler.py +++ b/jupyter_server/extension/handler.py @@ -1,20 +1,32 @@ +"""An extension handler.""" +from __future__ import annotations + +from logging import Logger +from typing import TYPE_CHECKING, Any, cast + from jinja2.exceptions import TemplateNotFound from jupyter_server.base.handlers import FileFindHandler +if TYPE_CHECKING: + from traitlets.config import Config + + from jupyter_server.extension.application import ExtensionApp + from jupyter_server.serverapp import ServerApp + class ExtensionHandlerJinjaMixin: """Mixin class for ExtensionApp handlers that use jinja templating for template rendering. 
""" - def get_template(self, name): + def get_template(self, name: str) -> str: """Return the jinja template object for a given name""" try: - env = f"{self.name}_jinja2_env" - return self.settings[env].get_template(name) + env = f"{self.name}_jinja2_env" # type:ignore[attr-defined] + return cast(str, self.settings[env].get_template(name)) # type:ignore[attr-defined] except TemplateNotFound: - return super().get_template(name) + return cast(str, super().get_template(name)) # type:ignore[misc] class ExtensionHandlerMixin: @@ -28,49 +40,55 @@ class ExtensionHandlerMixin: other extensions. """ - def initialize(self, name): + settings: dict[str, Any] + + def initialize(self, name: str, *args: Any, **kwargs: Any) -> None: self.name = name + try: + super().initialize(*args, **kwargs) # type:ignore[misc] + except TypeError: + pass @property - def extensionapp(self): - return self.settings[self.name] + def extensionapp(self) -> ExtensionApp: + return cast("ExtensionApp", self.settings[self.name]) @property - def serverapp(self): + def serverapp(self) -> ServerApp: key = "serverapp" - return self.settings[key] + return cast("ServerApp", self.settings[key]) @property - def log(self): + def log(self) -> Logger: if not hasattr(self, "name"): - return super().log + return cast(Logger, super().log) # type:ignore[misc] # Attempt to pull the ExtensionApp's log, otherwise fall back to ServerApp. 
try: - return self.extensionapp.log + return cast(Logger, self.extensionapp.log) except AttributeError: - return self.serverapp.log + return cast(Logger, self.serverapp.log) @property - def config(self): - return self.settings[f"{self.name}_config"] + def config(self) -> Config: + return cast("Config", self.settings[f"{self.name}_config"]) @property - def server_config(self): - return self.settings["config"] + def server_config(self) -> Config: + return cast("Config", self.settings["config"]) @property - def base_url(self): - return self.settings.get("base_url", "/") + def base_url(self) -> str: + return cast(str, self.settings.get("base_url", "/")) @property - def static_url_prefix(self): + def static_url_prefix(self) -> str: return self.extensionapp.static_url_prefix @property - def static_path(self): - return self.settings[f"{self.name}_static_paths"] + def static_path(self) -> str: + return cast(str, self.settings[f"{self.name}_static_paths"]) - def static_url(self, path, include_host=None, **kwargs): + def static_url(self, path: str, include_host: bool | None = None, **kwargs: Any) -> str: """Returns a static URL for the given relative static file path. This method requires you set the ``{name}_static_path`` setting in your extension (which specifies the root directory @@ -89,13 +107,14 @@ def static_url(self, path, include_host=None, **kwargs): """ key = f"{self.name}_static_paths" try: - self.require_setting(key, "static_url") + self.require_setting(key, "static_url") # type:ignore[attr-defined] except Exception as e: if key in self.settings: - raise Exception( + msg = ( "This extension doesn't have any static paths listed. Check that the " "extension's `static_paths` trait is set." 
- ) from e + ) + raise Exception(msg) from None else: raise e @@ -104,10 +123,9 @@ def static_url(self, path, include_host=None, **kwargs): if include_host is None: include_host = getattr(self, "include_host", False) + base = "" if include_host: - base = self.request.protocol + "://" + self.request.host - else: - base = "" + base = self.request.protocol + "://" + self.request.host # type:ignore[attr-defined] # Hijack settings dict to send extension templates to extension # static directory. @@ -116,4 +134,4 @@ def static_url(self, path, include_host=None, **kwargs): "static_url_prefix": self.static_url_prefix, } - return base + get_url(settings, path, **kwargs) + return base + cast(str, get_url(settings, path, **kwargs)) diff --git a/jupyter_server/extension/manager.py b/jupyter_server/extension/manager.py index 1efb2cadd0..3509e2e9f6 100644 --- a/jupyter_server/extension/manager.py +++ b/jupyter_server/extension/manager.py @@ -1,19 +1,16 @@ +"""The extension manager.""" +from __future__ import annotations + import importlib -import sys -import traceback +from itertools import starmap from tornado.gen import multi -from traitlets import Any, Bool, Dict, HasTraits, Instance, Unicode, default, observe +from traitlets import Any, Bool, Dict, HasTraits, Instance, List, Unicode, default, observe from traitlets import validate as validate_trait from traitlets.config import LoggingConfigurable from .config import ExtensionConfigManager -from .utils import ( - ExtensionMetadataError, - ExtensionModuleNotFound, - get_loader, - get_metadata, -) +from .utils import ExtensionMetadataError, ExtensionModuleNotFound, get_loader, get_metadata class ExtensionPoint(HasTraits): @@ -28,22 +25,23 @@ class ExtensionPoint(HasTraits): @validate_trait("metadata") def _valid_metadata(self, proposed): + """Validate metadata.""" metadata = proposed["value"] # Verify that the metadata has a "name" key. 
try: self._module_name = metadata["module"] except KeyError: - raise ExtensionMetadataError( - "There is no 'module' key in the extension's metadata packet." - ) + msg = "There is no 'module' key in the extension's metadata packet." + raise ExtensionMetadataError(msg) from None try: self._module = importlib.import_module(self._module_name) except ImportError: - raise ExtensionModuleNotFound( - "The submodule '{}' could not be found. Are you " - "sure the extension is installed?".format(self._module_name) + msg = ( + f"The submodule '{self._module_name}' could not be found. Are you " + "sure the extension is installed?" ) + raise ExtensionModuleNotFound(msg) from None # If the metadata includes an ExtensionApp, create an instance. if "app" in metadata: self._app = metadata["app"]() @@ -99,6 +97,7 @@ def module(self): return self._module def _get_linker(self): + """Get a linker.""" if self.app: linker = self.app._link_jupyter_server_extension else: @@ -112,6 +111,7 @@ def _get_linker(self): return linker def _get_loader(self): + """Get a loader.""" loc = self.app if not loc: loc = self.module @@ -150,7 +150,7 @@ def load(self, serverapp): return loader(serverapp) -class ExtensionPackage(HasTraits): +class ExtensionPackage(LoggingConfigurable): """An API for interfacing with a Jupyter Server extension package. Usage: @@ -160,74 +160,74 @@ class ExtensionPackage(HasTraits): """ name = Unicode(help="Name of the an importable Python package.") - enabled = Bool(False).tag(config=True) + enabled = Bool(False, help="Whether the extension package is enabled.") + + _linked_points = Dict() + extension_points = Dict() + module = Any(allow_none=True, help="The module for this extension package. None if not enabled") + metadata = List(Dict(), help="Extension metadata loaded from the extension package.") + version = Unicode( + help=""" + The version of this extension package, if it can be found. + Otherwise, an empty string. 
+ """, + ) - def __init__(self, *args, **kwargs): - # Store extension points that have been linked. - self._linked_points = {} - super().__init__(*args, **kwargs) + @default("version") + def _load_version(self): + if not self.enabled: + return "" + return getattr(self.module, "__version__", "") - _linked_points = {} + def __init__(self, **kwargs): + """Initialize an extension package.""" + super().__init__(**kwargs) + if self.enabled: + self._load_metadata() - @validate_trait("name") - def _validate_name(self, proposed): - name = proposed["value"] - self._extension_points = {} + def _load_metadata(self): + """Import package and load metadata + + Only used if extension package is enabled + """ + name = self.name try: - self._module, self._metadata = get_metadata(name) - except ImportError: - raise ExtensionModuleNotFound( - "The module '{name}' could not be found. Are you " - "sure the extension is installed?".format(name=name) + self.module, self.metadata = get_metadata(name, logger=self.log) + except ImportError as e: + msg = ( + f"The module '{name}' could not be found ({e}). Are you " + "sure the extension is installed?" ) + raise ExtensionModuleNotFound(msg) from None # Create extension point interfaces for each extension path. - for m in self._metadata: + for m in self.metadata: point = ExtensionPoint(metadata=m) - self._extension_points[point.name] = point + self.extension_points[point.name] = point return name - @property - def module(self): - """Extension metadata loaded from the extension package.""" - return self._module - - @property - def version(self): - """Get the version of this package, if it's given. 
Otherwise, return an empty string""" - return getattr(self._module, "__version__", "") - - @property - def metadata(self): - """Extension metadata loaded from the extension package.""" - return self._metadata - - @property - def extension_points(self): - """A dictionary of extension points.""" - return self._extension_points - def validate(self): """Validate all extension points in this package.""" - for extension in self.extension_points.values(): - if not extension.validate(): - return False - return True + return all(extension.validate() for extension in self.extension_points.values()) def link_point(self, point_name, serverapp): + """Link an extension point.""" linked = self._linked_points.get(point_name, False) if not linked: point = self.extension_points[point_name] point.link(serverapp) def load_point(self, point_name, serverapp): + """Load an extension point.""" point = self.extension_points[point_name] return point.load(serverapp) def link_all_points(self, serverapp): + """Link all extension points.""" for point_name in self.extension_points: self.link_point(point_name, serverapp) def load_all_points(self, serverapp): + """Load all extension points.""" return [self.load_point(point_name, serverapp) for point_name in self.extension_points] @@ -324,12 +324,19 @@ def add_extension(self, extension_name, enabled=False): return True # Raise a warning if the extension cannot be loaded. 
except Exception as e: - if self.serverapp.reraise_server_extension_failures: + if self.serverapp and self.serverapp.reraise_server_extension_failures: raise - self.log.warning(e) + self.log.warning( + "%s | error adding extension (enabled: %s): %s", + extension_name, + enabled, + e, + exc_info=True, + ) return False def link_extension(self, name): + """Link an extension by name.""" linked = self.linked_extensions.get(name, False) extension = self.extensions[name] if not linked and extension.enabled: @@ -337,36 +344,34 @@ def link_extension(self, name): # Link extension and store links extension.link_all_points(self.serverapp) self.linked_extensions[name] = True - self.log.info(f"{name} | extension was successfully linked.") + self.log.info("%s | extension was successfully linked.", name) except Exception as e: - if self.serverapp.reraise_server_extension_failures: + if self.serverapp and self.serverapp.reraise_server_extension_failures: raise - self.log.warning(e) + self.log.warning("%s | error linking extension: %s", name, e, exc_info=True) def load_extension(self, name): + """Load an extension by name.""" extension = self.extensions.get(name) - if extension.enabled: + if extension and extension.enabled: try: extension.load_all_points(self.serverapp) except Exception as e: - if self.serverapp.reraise_server_extension_failures: + if self.serverapp and self.serverapp.reraise_server_extension_failures: raise - self.log.debug("".join(traceback.format_exception(*sys.exc_info()))) self.log.warning( - "{name} | extension failed loading with message: {error}".format( - name=name, error=str(e) - ) + "%s | extension failed loading with message: %r", name, e, exc_info=True ) else: - self.log.info(f"{name} | extension was successfully loaded.") + self.log.info("%s | extension was successfully loaded.", name) async def stop_extension(self, name, apps): """Call the shutdown hooks in the specified apps.""" for app in apps: - self.log.debug(f'{name} | extension app "{app.name}" 
stopping') + self.log.debug("%s | extension app %r stopping", name, app.name) await app.stop_extension() - self.log.debug(f'{name} | extension app "{app.name}" stopped') + self.log.debug("%s | extension app %r stopped", name, app.name) def link_all_extensions(self): """Link all enabled extensions @@ -374,7 +379,7 @@ def link_all_extensions(self): """ # Sort the extension names to enforce deterministic linking # order. - for name in self.sorted_extensions.keys(): + for name in self.sorted_extensions: self.link_extension(name) def load_all_extensions(self): @@ -383,14 +388,16 @@ def load_all_extensions(self): """ # Sort the extension names to enforce deterministic loading # order. - for name in self.sorted_extensions.keys(): + for name in self.sorted_extensions: self.load_extension(name) async def stop_all_extensions(self): """Call the shutdown hooks in all extensions.""" - await multi( - [ - self.stop_extension(name, apps) - for name, apps in sorted(dict(self.extension_apps).items()) - ] - ) + await multi(list(starmap(self.stop_extension, sorted(dict(self.extension_apps).items())))) + + def any_activity(self): + """Check for any activity currently happening across all extension applications.""" + for _, apps in sorted(dict(self.extension_apps).items()): + for app in apps: + if app.current_activity(): + return True diff --git a/jupyter_server/extension/serverextension.py b/jupyter_server/extension/serverextension.py index 23c1bde231..19f3a30709 100644 --- a/jupyter_server/extension/serverextension.py +++ b/jupyter_server/extension/serverextension.py @@ -1,8 +1,12 @@ """Utilities for installing extensions""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
+from __future__ import annotations + +import logging import os import sys +import typing as t from jupyter_core.application import JupyterApp from jupyter_core.paths import ENV_CONFIG_PATH, SYSTEM_CONFIG_PATH, jupyter_config_dir @@ -14,7 +18,7 @@ from jupyter_server.extension.manager import ExtensionManager, ExtensionPackage -def _get_config_dir(user=False, sys_prefix=False): +def _get_config_dir(user: bool = False, sys_prefix: bool = False) -> str: """Get the location of config files for the current context Returns the string to the environment @@ -37,7 +41,9 @@ def _get_config_dir(user=False, sys_prefix=False): return extdir -def _get_extmanager_for_context(write_dir="jupyter_server_config.d", user=False, sys_prefix=False): +def _get_extmanager_for_context( + write_dir: str = "jupyter_server_config.d", user: bool = False, sys_prefix: bool = False +) -> tuple[str, ExtensionManager]: """Get an extension manager pointing at the current context Returns the path to the current context and an ExtensionManager object. 
@@ -66,7 +72,7 @@ class ArgumentConflict(ValueError): pass -_base_flags = {} +_base_flags: dict[str, t.Any] = {} _base_flags.update(JupyterApp.flags) _base_flags.pop("y", None) _base_flags.pop("generate-config", None) @@ -109,14 +115,14 @@ class ArgumentConflict(ValueError): ) _base_flags["python"] = _base_flags["py"] -_base_aliases = {} +_base_aliases: dict[str, t.Any] = {} _base_aliases.update(JupyterApp.aliases) class BaseExtensionApp(JupyterApp): """Base extension installer app""" - _log_formatter_cls = LogFormatter + _log_formatter_cls = LogFormatter # type:ignore[assignment] flags = _base_flags aliases = _base_aliases version = __version__ @@ -125,12 +131,12 @@ class BaseExtensionApp(JupyterApp): sys_prefix = Bool(True, config=True, help="Use the sys.prefix as the prefix") python = Bool(False, config=True, help="Install from a Python package") - def _log_format_default(self): + def _log_format_default(self) -> str: """A default format for messages""" return "%(message)s" @property - def config_dir(self): + def config_dir(self) -> str: # type:ignore[override] return _get_config_dir(user=self.user, sys_prefix=self.sys_prefix) @@ -147,8 +153,12 @@ def config_dir(self): def toggle_server_extension_python( - import_name, enabled=None, parent=None, user=False, sys_prefix=True -): + import_name: str, + enabled: bool | None = None, + parent: t.Any = None, + user: bool = False, + sys_prefix: bool = True, +) -> None: """Toggle the boolean setting for a given server extension in a Jupyter config file. """ @@ -212,11 +222,14 @@ def toggle_server_extension_python( flags["python"] = flags["py"] +_desc = "Enable/disable a server extension using frontend configuration files." + + class ToggleServerExtensionApp(BaseExtensionApp): """A base class for enabling/disabling extensions""" name = "jupyter server extension enable/disable" - description = "Enable/disable a server extension using frontend configuration files." 
+ description = _desc flags = flags @@ -224,7 +237,7 @@ class ToggleServerExtensionApp(BaseExtensionApp): _toggle_pre_message = "" _toggle_post_message = "" - def toggle_server_extension(self, import_name): + def toggle_server_extension(self, import_name: str) -> None: """Change the status of a named server extension. Uses the value of `self._toggle_value`. @@ -253,17 +266,18 @@ def toggle_server_extension(self, import_name): # Toggle extension config. config = extension_manager.config_manager - if self._toggle_value is True: - config.enable(import_name) - else: - config.disable(import_name) + if config: + if self._toggle_value is True: + config.enable(import_name) + else: + config.disable(import_name) # If successful, let's log. self.log.info(f" - Extension successfully {self._toggle_post_message}.") except Exception as err: self.log.info(f" {RED_X} Validation failed: {err}") - def start(self): + def start(self) -> None: """Perform the App's actions as configured""" if not self.extra_args: sys.exit("Please specify a server extension/package to enable or disable") @@ -281,7 +295,7 @@ class EnableServerExtensionApp(ToggleServerExtensionApp): Usage jupyter server extension enable [--system|--sys-prefix] """ - _toggle_value = True + _toggle_value = True # type:ignore[assignment] _toggle_pre_message = "enabling" _toggle_post_message = "enabled" @@ -296,7 +310,7 @@ class DisableServerExtensionApp(ToggleServerExtensionApp): Usage jupyter server extension disable [--system|--sys-prefix] """ - _toggle_value = False + _toggle_value = False # type:ignore[assignment] _toggle_pre_message = "disabling" _toggle_post_message = "disabled" @@ -308,7 +322,7 @@ class ListServerExtensionsApp(BaseExtensionApp): version = __version__ description = "List all server extensions known by the configuration system" - def list_server_extensions(self): + def list_server_extensions(self) -> None: """List all enabled and disabled server extensions, by config path Enabled extensions are validated, 
potentially generating warnings. @@ -320,24 +334,34 @@ def list_server_extensions(self): ) for option in configurations: - config_dir, ext_manager = _get_extmanager_for_context(**option) + config_dir = _get_config_dir(**option) self.log.info(f"Config dir: {config_dir}") - for name, extension in ext_manager.extensions.items(): - enabled = extension.enabled + write_dir = "jupyter_server_config.d" + config_manager = ExtensionConfigManager( + read_config_path=[config_dir], + write_config_dir=os.path.join(config_dir, write_dir), + ) + jpserver_extensions = config_manager.get_jpserver_extensions() + for name, enabled in jpserver_extensions.items(): # Attempt to get extension metadata self.log.info(f" {name} {GREEN_ENABLED if enabled else RED_DISABLED}") try: self.log.info(f" - Validating {name}...") + extension = ExtensionPackage(name=name, enabled=enabled) if not extension.validate(): - raise ValueError("validation failed") + msg = "validation failed" + raise ValueError(msg) version = extension.version self.log.info(f" {name} {version} {GREEN_OK}") except Exception as err: - self.log.warning(f" {RED_X} {err}") + exc_info = False + if int(self.log_level) <= logging.DEBUG: # type:ignore[call-overload] + exc_info = True + self.log.warning(f" {RED_X} {err}", exc_info=exc_info) # Add a blank line between paths. 
self.log.info("") - def start(self): + def start(self) -> None: """Perform the App's actions as configured""" self.list_server_extensions() @@ -354,16 +378,16 @@ class ServerExtensionApp(BaseExtensionApp): name = "jupyter server extension" version = __version__ - description = "Work with Jupyter server extensions" + description: str = "Work with Jupyter server extensions" examples = _examples - subcommands = dict( - enable=(EnableServerExtensionApp, "Enable a server extension"), - disable=(DisableServerExtensionApp, "Disable a server extension"), - list=(ListServerExtensionsApp, "List server extensions"), - ) + subcommands: dict[str, t.Any] = { + "enable": (EnableServerExtensionApp, "Enable a server extension"), + "disable": (DisableServerExtensionApp, "Disable a server extension"), + "list": (ListServerExtensionsApp, "List server extensions"), + } - def start(self): + def start(self) -> None: """Perform the App's actions as configured""" super().start() diff --git a/jupyter_server/extension/utils.py b/jupyter_server/extension/utils.py index a8c93a0580..5d18939ab2 100644 --- a/jupyter_server/extension/utils.py +++ b/jupyter_server/extension/utils.py @@ -1,21 +1,23 @@ +"""Extension utilities.""" import importlib +import time import warnings class ExtensionLoadingError(Exception): - pass + """An extension loading error.""" class ExtensionMetadataError(Exception): - pass + """An extension metadata error.""" class ExtensionModuleNotFound(Exception): - pass + """An extension module not found error.""" class NotAnExtensionApp(Exception): - pass + """An error raised when a module is not an extension.""" def get_loader(obj, logger=None): @@ -26,19 +28,25 @@ def get_loader(obj, logger=None): underscore prefix. 
""" try: - func = getattr(obj, "_load_jupyter_server_extension") # noqa B009 + return obj._load_jupyter_server_extension except AttributeError: - func = getattr(obj, "load_jupyter_server_extension", None) - warnings.warn( - "A `_load_jupyter_server_extension` function was not " - "found in {name!s}. Instead, a `load_jupyter_server_extension` " - "function was found and will be used for now. This function " - "name will be deprecated in future releases " - "of Jupyter Server.".format(name=obj), - DeprecationWarning, - ) - except Exception: - raise ExtensionLoadingError("_load_jupyter_server_extension function was not found.") + pass + + try: + func = obj.load_jupyter_server_extension + except AttributeError: + msg = "_load_jupyter_server_extension function was not found." + raise ExtensionLoadingError(msg) from None + + warnings.warn( + "A `_load_jupyter_server_extension` function was not " + f"found in {obj!s}. Instead, a `load_jupyter_server_extension` " + "function was found and will be used for now. This function " + "name will be deprecated in future releases " + "of Jupyter Server.", + DeprecationWarning, + stacklevel=2, + ) return func @@ -47,12 +55,20 @@ def get_metadata(package_name, logger=None): This looks for a `_jupyter_server_extension_points` function that returns metadata about all extension points within a Jupyter - Server Extension pacakge. + Server Extension package. If it doesn't exist, return a basic metadata packet given the module name. """ + start_time = time.perf_counter() module = importlib.import_module(package_name) + end_time = time.perf_counter() + duration = end_time - start_time + # Sometimes packages can take a *while* to import, so we report how long + # each module took to import. 
This makes it much easier for users to report + # slow loading modules upstream, as slow loading modules will block server startup + if logger: + logger.info(f"Package {package_name} took {duration:.4f}s to import") try: return module, module._jupyter_server_extension_points() @@ -67,10 +83,10 @@ def get_metadata(package_name, logger=None): if logger: logger.warning( "A `_jupyter_server_extension_points` function was not " - "found in {name}. Instead, a `_jupyter_server_extension_paths` " + f"found in {package_name}. Instead, a `_jupyter_server_extension_paths` " "function was found and will be used for now. This function " "name will be deprecated in future releases " - "of Jupyter Server.".format(name=package_name) + "of Jupyter Server." ) return module, extension_points except AttributeError: @@ -81,9 +97,9 @@ def get_metadata(package_name, logger=None): if logger: logger.debug( "A `_jupyter_server_extension_points` function was " - "not found in {name}, so Jupyter Server will look " + f"not found in {package_name}, so Jupyter Server will look " "for extension points in the extension pacakge's " - "root.".format(name=package_name) + "root." ) return module, [{"module": package_name, "name": package_name}] diff --git a/jupyter_server/files/handlers.py b/jupyter_server/files/handlers.py index c76fdc28d3..043c581034 100644 --- a/jupyter_server/files/handlers.py +++ b/jupyter_server/files/handlers.py @@ -1,20 +1,22 @@ """Serve files directly from the ContentsManager.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
-import json +from __future__ import annotations + import mimetypes from base64 import decodebytes +from typing import Awaitable +from jupyter_core.utils import ensure_async from tornado import web -from jupyter_server.auth import authorized +from jupyter_server.auth.decorator import authorized from jupyter_server.base.handlers import JupyterHandler -from jupyter_server.utils import ensure_async AUTH_RESOURCE = "contents" -class FilesHandler(JupyterHandler): +class FilesHandler(JupyterHandler, web.StaticFileHandler): """serve files via ContentsManager Normally used when ContentsManager is not a FileContentsManager. @@ -27,13 +29,15 @@ class FilesHandler(JupyterHandler): @property def content_security_policy(self): + """The content security policy.""" # In case we're serving HTML/SVG, confine any Javascript to a unique # origin so it can't interact with the notebook server. return super().content_security_policy + "; sandbox allow-scripts" @web.authenticated @authorized - def head(self, path): + def head(self, path: str) -> Awaitable[None] | None: # type:ignore[override] + """The head response.""" self.get(path, include_body=False) self.check_xsrf_cookie() return self.get(path, include_body=False) @@ -41,11 +45,12 @@ def head(self, path): @web.authenticated @authorized async def get(self, path, include_body=True): + """Get a file by path.""" # /files/ requests must originate from the same site self.check_xsrf_cookie() cm = self.contents_manager - if await ensure_async(cm.is_hidden(path)) and not cm.allow_hidden: + if not cm.allow_hidden and await ensure_async(cm.is_hidden(path)): self.log.info("Refusing to serve hidden file, via 404 Error") raise web.HTTPError(404) @@ -57,7 +62,7 @@ async def get(self, path, include_body=True): model = await ensure_async(cm.get(path, type="file", content=include_body)) - if self.get_argument("download", False): + if self.get_argument("download", None): self.set_attachment_header(name) # get mimetype from filename @@ -74,21 +79,18 @@ 
async def get(self, path, include_body=True): self.set_header("Content-Type", "application/octet-stream") elif cur_mime is not None: self.set_header("Content-Type", cur_mime) + elif model["format"] == "base64": + self.set_header("Content-Type", "application/octet-stream") else: - if model["format"] == "base64": - self.set_header("Content-Type", "application/octet-stream") - else: - self.set_header("Content-Type", "text/plain; charset=UTF-8") + self.set_header("Content-Type", "text/plain; charset=UTF-8") if include_body: if model["format"] == "base64": b64_bytes = model["content"].encode("ascii") self.write(decodebytes(b64_bytes)) - elif model["format"] == "json": - self.write(json.dumps(model["content"])) else: self.write(model["content"]) self.flush() -default_handlers = [] +default_handlers: list[JupyterHandler] = [] diff --git a/jupyter_server/gateway/connections.py b/jupyter_server/gateway/connections.py new file mode 100644 index 0000000000..028a0f8f4e --- /dev/null +++ b/jupyter_server/gateway/connections.py @@ -0,0 +1,176 @@ +"""Gateway connection classes.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. 
+from __future__ import annotations + +import asyncio +import logging +import random +from typing import Any, cast + +import tornado.websocket as tornado_websocket +from tornado.concurrent import Future +from tornado.escape import json_decode, url_escape, utf8 +from tornado.httpclient import HTTPRequest +from tornado.ioloop import IOLoop +from traitlets import Bool, Instance, Int + +from ..services.kernels.connection.base import BaseKernelWebsocketConnection +from ..utils import url_path_join +from .gateway_client import GatewayClient + + +class GatewayWebSocketConnection(BaseKernelWebsocketConnection): + """Web socket connection that proxies to a kernel/enterprise gateway.""" + + ws = Instance(klass=tornado_websocket.WebSocketClientConnection, allow_none=True) + + ws_future = Instance(klass=Future, allow_none=True) + + disconnected = Bool(False) + + retry = Int(0) + + async def connect(self): + """Connect to the socket.""" + # websocket is initialized before connection + self.ws = None + ws_url = url_path_join( + GatewayClient.instance().ws_url or "", + GatewayClient.instance().kernels_endpoint, + url_escape(self.kernel_id), + "channels", + ) + self.log.info(f"Connecting to {ws_url}") + kwargs: dict[str, Any] = {} + kwargs = GatewayClient.instance().load_connection_args(**kwargs) + + request = HTTPRequest(ws_url, **kwargs) + self.ws_future = cast("Future[Any]", tornado_websocket.websocket_connect(request)) + self.ws_future.add_done_callback(self._connection_done) + + loop = IOLoop.current() + loop.add_future(self.ws_future, lambda future: self._read_messages()) + + def _connection_done(self, fut): + """Handle a finished connection.""" + if ( + not self.disconnected and fut.exception() is None + ): # prevent concurrent.futures._base.CancelledError + self.ws = fut.result() + self.retry = 0 + self.log.debug(f"Connection is ready: ws: {self.ws}") + else: + self.log.warning( + "Websocket connection has been closed via client disconnect or due to error. 
" + "Kernel with ID '{}' may not be terminated on GatewayClient: {}".format( + self.kernel_id, GatewayClient.instance().url + ) + ) + + def disconnect(self): + """Handle a disconnect.""" + self.disconnected = True + if self.ws is not None: + # Close connection + self.ws.close() + elif self.ws_future and not self.ws_future.done(): + # Cancel pending connection. Since future.cancel() is a noop on tornado, we'll track cancellation locally + self.ws_future.cancel() + self.log.debug(f"_disconnect: future cancelled, disconnected: {self.disconnected}") + + async def _read_messages(self): + """Read messages from gateway server.""" + while self.ws is not None: + message = None + if not self.disconnected: + try: + message = await self.ws.read_message() + except Exception as e: + self.log.error( + f"Exception reading message from websocket: {e}" + ) # , exc_info=True) + if message is None: + if not self.disconnected: + self.log.warning(f"Lost connection to Gateway: {self.kernel_id}") + break + if isinstance(message, bytes): + message = message.decode("utf8") + self.handle_outgoing_message( + message + ) # pass back to notebook client (see self.on_open and WebSocketChannelsHandler.open) + else: # ws cancelled - stop reading + break + + # NOTE(esevan): if websocket is not disconnected by client, try to reconnect. 
+ if not self.disconnected and self.retry < GatewayClient.instance().gateway_retry_max: + jitter = random.randint(10, 100) * 0.01 + retry_interval = ( + min( + GatewayClient.instance().gateway_retry_interval * (2**self.retry), + GatewayClient.instance().gateway_retry_interval_max, + ) + + jitter + ) + self.retry += 1 + self.log.info( + "Attempting to re-establish the connection to Gateway in %s secs (%s/%s): %s", + retry_interval, + self.retry, + GatewayClient.instance().gateway_retry_max, + self.kernel_id, + ) + await asyncio.sleep(retry_interval) + loop = IOLoop.current() + loop.spawn_callback(self.connect) + + def handle_outgoing_message(self, incoming_msg: str, *args: Any) -> None: + """Send message to the notebook client.""" + try: + self.websocket_handler.write_message(incoming_msg) + except tornado_websocket.WebSocketClosedError: + if self.log.isEnabledFor(logging.DEBUG): + msg_summary = GatewayWebSocketConnection._get_message_summary( + json_decode(utf8(incoming_msg)) + ) + self.log.debug( + f"Notebook client closed websocket connection - message dropped: {msg_summary}" + ) + + def handle_incoming_message(self, message: str) -> None: + """Send message to gateway server.""" + if self.ws is None and self.ws_future is not None: + loop = IOLoop.current() + loop.add_future(self.ws_future, lambda future: self.handle_incoming_message(message)) + else: + self._write_message(message) + + def _write_message(self, message): + """Send message to gateway server.""" + try: + if not self.disconnected and self.ws is not None: + self.ws.write_message(message) + except Exception as e: + self.log.error(f"Exception writing message to websocket: {e}") # , exc_info=True) + + @staticmethod + def _get_message_summary(message): + """Get a summary of a message.""" + summary = [] + message_type = message["msg_type"] + summary.append(f"type: {message_type}") + + if message_type == "status": + summary.append(", state: {}".format(message["content"]["execution_state"])) + elif 
message_type == "error": + summary.append( + ", {}:{}:{}".format( + message["content"]["ename"], + message["content"]["evalue"], + message["content"]["traceback"], + ) + ) + else: + summary.append(", ...") # don't display potentially sensitive data + + return "".join(summary) diff --git a/jupyter_server/gateway/gateway_client.py b/jupyter_server/gateway/gateway_client.py index 396a9a1abc..437d54d227 100644 --- a/jupyter_server/gateway/gateway_client.py +++ b/jupyter_server/gateway/gateway_client.py @@ -1,30 +1,127 @@ +"""A kernel gateway client.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from __future__ import annotations + +import asyncio import json +import logging import os +import typing as ty +from abc import ABC, ABCMeta, abstractmethod +from datetime import datetime, timezone +from email.utils import parsedate_to_datetime +from http.cookies import SimpleCookie from socket import gaierror +from jupyter_events import EventLogger from tornado import web -from tornado.httpclient import AsyncHTTPClient, HTTPError -from traitlets import Bool, Float, Int, TraitError, Unicode, default, validate -from traitlets.config import SingletonConfigurable +from tornado.httpclient import AsyncHTTPClient, HTTPClientError, HTTPResponse +from traitlets import ( + Bool, + Float, + Instance, + Int, + TraitError, + Type, + Unicode, + default, + observe, + validate, +) +from traitlets.config import LoggingConfigurable, SingletonConfigurable + +from jupyter_server import DEFAULT_EVENTS_SCHEMA_PATH, JUPYTER_SERVER_EVENTS_URI + +ERROR_STATUS = "error" +SUCCESS_STATUS = "success" +STATUS_KEY = "status" +STATUS_CODE_KEY = "status_code" +MESSAGE_KEY = "msg" + +if ty.TYPE_CHECKING: + from http.cookies import Morsel + + +class GatewayTokenRenewerMeta(ABCMeta, type(LoggingConfigurable)): # type: ignore[misc] + """The metaclass necessary for proper ABC behavior in a Configurable.""" + + +class GatewayTokenRenewerBase( # 
type:ignore[misc] + ABC, LoggingConfigurable, metaclass=GatewayTokenRenewerMeta +): + """ + Abstract base class for refreshing tokens used between this server and a Gateway + server. Implementations requiring additional configuration can extend their class + with appropriate configuration values or convey those values via appropriate + environment variables relative to the implementation. + """ + @abstractmethod + def get_token( + self, + auth_header_key: str, + auth_scheme: ty.Union[str, None], + auth_token: str, + **kwargs: ty.Any, + ) -> str: + """ + Given the current authorization header key, scheme, and token, this method returns + a (potentially renewed) token for use against the Gateway server. + """ -class GatewayClient(SingletonConfigurable): - """This class manages the configuration. It's its own singleton class so that we - can share these values across all objects. It also contains some helper methods - to build request arguments out of the various config options. +class NoOpTokenRenewer(GatewayTokenRenewerBase): # type:ignore[misc] + """NoOpTokenRenewer is the default value to the GatewayClient trait + `gateway_token_renewer` and merely returns the provided token. """ + def get_token( + self, + auth_header_key: str, + auth_scheme: ty.Union[str, None], + auth_token: str, + **kwargs: ty.Any, + ) -> str: + """This implementation simply returns the current authorization token.""" + return auth_token + + +class GatewayClient(SingletonConfigurable): + """This class manages the configuration. It's its own singleton class so + that we can share these values across all objects. It also contains some + options. 
+ helper methods to build request arguments out of the various config + """ + + event_schema_id = JUPYTER_SERVER_EVENTS_URI + "/gateway_client/v1" + event_logger = Instance(EventLogger).tag(config=True) + + @default("event_logger") + def _default_event_logger(self): + if self.parent and hasattr(self.parent, "event_logger"): + # Event logger is attached from serverapp. + return self.parent.event_logger + else: + # If parent does not have an event logger, create one. + logger = EventLogger() + schema_path = DEFAULT_EVENTS_SCHEMA_PATH / "gateway_client" / "v1.yaml" + logger.register_event_schema(schema_path) + self.log.info("Event is registered in GatewayClient.") + return logger + + def emit(self, data): + """Emit event using the core event schema from Jupyter Server's Gateway Client.""" + self.event_logger.emit(schema_id=self.event_schema_id, data=data) + url = Unicode( default_value=None, allow_none=True, config=True, help="""The url of the Kernel or Enterprise Gateway server where - kernel specifications are defined and kernel management takes place. - If defined, this Notebook server acts as a proxy for all kernel - management and kernel specification retrieval. (JUPYTER_GATEWAY_URL env var) +kernel specifications are defined and kernel management takes place. +If defined, this Notebook server acts as a proxy for all kernel +management and kernel specification retrieval. 
(JUPYTER_GATEWAY_URL env var) """, ) @@ -38,9 +135,10 @@ def _url_default(self): def _url_validate(self, proposal): value = proposal["value"] # Ensure value, if present, starts with 'http' - if value is not None and len(value) > 0: - if not str(value).lower().startswith("http"): - raise TraitError("GatewayClient url must start with 'http': '%r'" % value) + if value is not None and len(value) > 0 and not str(value).lower().startswith("http"): + message = "GatewayClient url must start with 'http': '%r'" % value + self.emit(data={STATUS_KEY: ERROR_STATUS, STATUS_CODE_KEY: 400, MESSAGE_KEY: message}) + raise TraitError(message) return value ws_url = Unicode( @@ -48,7 +146,7 @@ def _url_validate(self, proposal): allow_none=True, config=True, help="""The websocket url of the Kernel or Enterprise Gateway server. If not provided, this value - will correspond to the value of the Gateway url with 'ws' in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var) +will correspond to the value of the Gateway url with 'ws' in place of 'http'. 
(JUPYTER_GATEWAY_WS_URL env var) """, ) @@ -57,18 +155,18 @@ def _url_validate(self, proposal): @default("ws_url") def _ws_url_default(self): default_value = os.environ.get(self.ws_url_env) - if default_value is None: - if self.gateway_enabled: - default_value = self.url.lower().replace("http", "ws") + if self.url is not None and default_value is None and self.gateway_enabled: + default_value = self.url.lower().replace("http", "ws") return default_value @validate("ws_url") def _ws_url_validate(self, proposal): value = proposal["value"] # Ensure value, if present, starts with 'ws' - if value is not None and len(value) > 0: - if not str(value).lower().startswith("ws"): - raise TraitError("GatewayClient ws_url must start with 'ws': '%r'" % value) + if value is not None and len(value) > 0 and not str(value).lower().startswith("ws"): + message = "GatewayClient ws_url must start with 'ws': '%r'" % value + self.emit(data={STATUS_KEY: ERROR_STATUS, STATUS_CODE_KEY: 400, MESSAGE_KEY: message}) + raise TraitError(message) return value kernels_endpoint_default_value = "/api/kernels" @@ -103,7 +201,7 @@ def _kernelspecs_endpoint_default(self): default_value=kernelspecs_resource_endpoint_default_value, config=True, help="""The gateway endpoint for accessing kernelspecs resources - (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)""", +(JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)""", ) @default("kernelspecs_resource_endpoint") @@ -119,16 +217,14 @@ def _kernelspecs_resource_endpoint_default(self): default_value=connect_timeout_default_value, config=True, help="""The time allowed for HTTP connection establishment with the Gateway server. 
- (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)""", +(JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)""", ) @default("connect_timeout") - def connect_timeout_default(self): - return float( - os.environ.get("JUPYTER_GATEWAY_CONNECT_TIMEOUT", self.connect_timeout_default_value) - ) + def _connect_timeout_default(self): + return float(os.environ.get(self.connect_timeout_env, self.connect_timeout_default_value)) - request_timeout_default_value = 40.0 + request_timeout_default_value = 42.0 request_timeout_env = "JUPYTER_GATEWAY_REQUEST_TIMEOUT" request_timeout = Float( default_value=request_timeout_default_value, @@ -137,10 +233,8 @@ def connect_timeout_default(self): ) @default("request_timeout") - def request_timeout_default(self): - return float( - os.environ.get("JUPYTER_GATEWAY_REQUEST_TIMEOUT", self.request_timeout_default_value) - ) + def _request_timeout_default(self): + return float(os.environ.get(self.request_timeout_env, self.request_timeout_default_value)) client_key = Unicode( default_value=None, @@ -222,36 +316,54 @@ def _http_pwd_default(self): def _headers_default(self): return os.environ.get(self.headers_env, self.headers_default_value) + auth_header_key_default_value = "Authorization" + auth_header_key = Unicode( + config=True, + help="""The authorization header's key name (typically 'Authorization') used in the HTTP headers. The +header will be formatted as:: + +{'{auth_header_key}': '{auth_scheme} {auth_token}'} + +If the authorization header key takes a single value, `auth_scheme` should be set to None and +'auth_token' should be configured to use the appropriate value. 
+ +(JUPYTER_GATEWAY_AUTH_HEADER_KEY env var)""", + ) + auth_header_key_env = "JUPYTER_GATEWAY_AUTH_HEADER_KEY" + + @default("auth_header_key") + def _auth_header_key_default(self): + return os.environ.get(self.auth_header_key_env, self.auth_header_key_default_value) + + auth_token_default_value = "" auth_token = Unicode( default_value=None, allow_none=True, config=True, help="""The authorization token used in the HTTP headers. The header will be formatted as:: - { - 'Authorization': '{auth_scheme} {auth_token}' - } +{'{auth_header_key}': '{auth_scheme} {auth_token}'} - (JUPYTER_GATEWAY_AUTH_TOKEN env var)""", +(JUPYTER_GATEWAY_AUTH_TOKEN env var)""", ) auth_token_env = "JUPYTER_GATEWAY_AUTH_TOKEN" @default("auth_token") def _auth_token_default(self): - return os.environ.get(self.auth_token_env, "") + return os.environ.get(self.auth_token_env, self.auth_token_default_value) + auth_scheme_default_value = "token" # This value is purely for backwards compatibility auth_scheme = Unicode( - default_value=None, allow_none=True, config=True, help="""The auth scheme, added as a prefix to the authorization token used in the HTTP headers. - (JUPYTER_GATEWAY_AUTH_SCHEME env var)""", +(JUPYTER_GATEWAY_AUTH_SCHEME env var)""", ) auth_scheme_env = "JUPYTER_GATEWAY_AUTH_SCHEME" @default("auth_scheme") def _auth_scheme_default(self): - return os.environ.get(self.auth_scheme_env, "token") + return os.environ.get(self.auth_scheme_env, self.auth_scheme_default_value) validate_cert_default_value = True validate_cert_env = "JUPYTER_GATEWAY_VALIDATE_CERT" @@ -259,34 +371,39 @@ def _auth_scheme_default(self): default_value=validate_cert_default_value, config=True, help="""For HTTPS requests, determines if server's certificate should be validated or not. 
- (JUPYTER_GATEWAY_VALIDATE_CERT env var)""", +(JUPYTER_GATEWAY_VALIDATE_CERT env var)""", ) @default("validate_cert") - def validate_cert_default(self): + def _validate_cert_default(self): return bool( os.environ.get(self.validate_cert_env, str(self.validate_cert_default_value)) not in ["no", "false"] ) - def __init__(self, **kwargs): - super().__init__(**kwargs) - self._static_args = {} # initialized on first use - - env_whitelist_default_value = "" - env_whitelist_env = "JUPYTER_GATEWAY_ENV_WHITELIST" - env_whitelist = Unicode( - default_value=env_whitelist_default_value, + allowed_envs_default_value = "" + allowed_envs_env = "JUPYTER_GATEWAY_ALLOWED_ENVS" + allowed_envs = Unicode( + default_value=allowed_envs_default_value, config=True, help="""A comma-separated list of environment variable names that will be included, along with - their values, in the kernel startup request. The corresponding `env_whitelist` configuration - value must also be set on the Gateway server - since that configuration value indicates which - environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var)""", +their values, in the kernel startup request. The corresponding `client_envs` configuration +value must also be set on the Gateway server - since that configuration value indicates which +environmental values to make available to the kernel. 
(JUPYTER_GATEWAY_ALLOWED_ENVS env var)""", ) - @default("env_whitelist") - def _env_whitelist_default(self): - return os.environ.get(self.env_whitelist_env, self.env_whitelist_default_value) + @default("allowed_envs") + def _allowed_envs_default(self): + return os.environ.get( + self.allowed_envs_env, + os.environ.get("JUPYTER_GATEWAY_ENV_WHITELIST", self.allowed_envs_default_value), + ) + + env_whitelist = Unicode( + default_value=allowed_envs_default_value, + config=True, + help="""Deprecated, use `GatewayClient.allowed_envs`""", + ) gateway_retry_interval_default_value = 1.0 gateway_retry_interval_env = "JUPYTER_GATEWAY_RETRY_INTERVAL" @@ -294,16 +411,16 @@ def _env_whitelist_default(self): default_value=gateway_retry_interval_default_value, config=True, help="""The time allowed for HTTP reconnection with the Gateway server for the first time. - Next will be JUPYTER_GATEWAY_RETRY_INTERVAL multiplied by two in factor of numbers of retries - but less than JUPYTER_GATEWAY_RETRY_INTERVAL_MAX. - (JUPYTER_GATEWAY_RETRY_INTERVAL env var)""", +Next will be JUPYTER_GATEWAY_RETRY_INTERVAL multiplied by two in factor of numbers of retries +but less than JUPYTER_GATEWAY_RETRY_INTERVAL_MAX. +(JUPYTER_GATEWAY_RETRY_INTERVAL env var)""", ) @default("gateway_retry_interval") - def gateway_retry_interval_default(self): + def _gateway_retry_interval_default(self): return float( os.environ.get( - "JUPYTER_GATEWAY_RETRY_INTERVAL", + self.gateway_retry_interval_env, self.gateway_retry_interval_default_value, ) ) @@ -314,14 +431,14 @@ def gateway_retry_interval_default(self): default_value=gateway_retry_interval_max_default_value, config=True, help="""The maximum time allowed for HTTP reconnection retry with the Gateway server. 
- (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var)""", +(JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var)""", ) @default("gateway_retry_interval_max") - def gateway_retry_interval_max_default(self): + def _gateway_retry_interval_max_default(self): return float( os.environ.get( - "JUPYTER_GATEWAY_RETRY_INTERVAL_MAX", + self.gateway_retry_interval_max_env, self.gateway_retry_interval_max_default_value, ) ) @@ -332,15 +449,87 @@ def gateway_retry_interval_max_default(self): default_value=gateway_retry_max_default_value, config=True, help="""The maximum retries allowed for HTTP reconnection with the Gateway server. - (JUPYTER_GATEWAY_RETRY_MAX env var)""", +(JUPYTER_GATEWAY_RETRY_MAX env var)""", ) @default("gateway_retry_max") - def gateway_retry_max_default(self): - return int( - os.environ.get("JUPYTER_GATEWAY_RETRY_MAX", self.gateway_retry_max_default_value) + def _gateway_retry_max_default(self): + return int(os.environ.get(self.gateway_retry_max_env, self.gateway_retry_max_default_value)) + + gateway_token_renewer_class_default_value = ( + "jupyter_server.gateway.gateway_client.NoOpTokenRenewer" + ) + gateway_token_renewer_class_env = "JUPYTER_GATEWAY_TOKEN_RENEWER_CLASS" + gateway_token_renewer_class = Type( + klass=GatewayTokenRenewerBase, + config=True, + help="""The class to use for Gateway token renewal. (JUPYTER_GATEWAY_TOKEN_RENEWER_CLASS env var)""", + ) + + @default("gateway_token_renewer_class") + def _gateway_token_renewer_class_default(self): + return os.environ.get( + self.gateway_token_renewer_class_env, self.gateway_token_renewer_class_default_value + ) + + launch_timeout_pad_default_value = 2.0 + launch_timeout_pad_env = "JUPYTER_GATEWAY_LAUNCH_TIMEOUT_PAD" + launch_timeout_pad = Float( + default_value=launch_timeout_pad_default_value, + config=True, + help="""Timeout pad to be ensured between KERNEL_LAUNCH_TIMEOUT and request_timeout +such that request_timeout >= KERNEL_LAUNCH_TIMEOUT + launch_timeout_pad. 
+(JUPYTER_GATEWAY_LAUNCH_TIMEOUT_PAD env var)""", + ) + + @default("launch_timeout_pad") + def _launch_timeout_pad_default(self): + return float( + os.environ.get( + self.launch_timeout_pad_env, + self.launch_timeout_pad_default_value, + ) ) + accept_cookies_value = False + accept_cookies_env = "JUPYTER_GATEWAY_ACCEPT_COOKIES" + accept_cookies = Bool( + default_value=accept_cookies_value, + config=True, + help="""Accept and manage cookies sent by the service side. This is often useful + for load balancers to decide which backend node to use. + (JUPYTER_GATEWAY_ACCEPT_COOKIES env var)""", + ) + + @default("accept_cookies") + def _accept_cookies_default(self): + return bool( + os.environ.get(self.accept_cookies_env, str(self.accept_cookies_value).lower()) + not in ["no", "false"] + ) + + _deprecated_traits = { + "env_whitelist": ("allowed_envs", "2.0"), + } + + # Method copied from + # https://github.com/jupyterhub/jupyterhub/blob/d1a85e53dccfc7b1dd81b0c1985d158cc6b61820/jupyterhub/auth.py#L143-L161 + @observe(*list(_deprecated_traits)) + def _deprecated_trait(self, change): + """observer for deprecated traits""" + old_attr = change.name + new_attr, version = self._deprecated_traits[old_attr] + new_value = getattr(self, new_attr) + if new_value != change.new: + # only warn if different + # protects backward-compatible config from warnings + # if they set the same value under both names + self.log.warning( + f"{self.__class__.__name__}.{old_attr} is deprecated in jupyter_server " + f"{version}, use {self.__class__.__name__}.{new_attr} instead" + ) + setattr(self, new_attr, change.new) + @property def gateway_enabled(self): return bool(self.url is not None and len(self.url) > 0) @@ -348,84 +537,289 @@ def gateway_enabled(self): # Ensure KERNEL_LAUNCH_TIMEOUT has a default value. KERNEL_LAUNCH_TIMEOUT = int(os.environ.get("KERNEL_LAUNCH_TIMEOUT", 40)) - def init_static_args(self): - """Initialize arguments used on every request. 
Since these are static values, we'll - perform this operation once. + _connection_args: dict[str, ty.Any] # initialized on first use + + gateway_token_renewer: GatewayTokenRenewerBase + + def __init__(self, **kwargs): + """Initialize a gateway client.""" + super().__init__(**kwargs) + self._connection_args = {} # initialized on first use + self.gateway_token_renewer = self.gateway_token_renewer_class(parent=self, log=self.log) # type:ignore[abstract] + + # store of cookies with store time + self._cookies: dict[str, tuple[Morsel[ty.Any], datetime]] = {} + def init_connection_args(self): + """Initialize arguments used on every request. Since these are primarily static values, + we'll perform this operation once. """ - # Ensure that request timeout and KERNEL_LAUNCH_TIMEOUT are the same, taking the - # greater value of the two. - if self.request_timeout < float(GatewayClient.KERNEL_LAUNCH_TIMEOUT): - self.request_timeout = float(GatewayClient.KERNEL_LAUNCH_TIMEOUT) - elif self.request_timeout > float(GatewayClient.KERNEL_LAUNCH_TIMEOUT): - GatewayClient.KERNEL_LAUNCH_TIMEOUT = int(self.request_timeout) + # Ensure that request timeout and KERNEL_LAUNCH_TIMEOUT are in sync, taking the + # greater value of the two and taking into account the following relation: + # request_timeout = KERNEL_LAUNCH_TIME + padding + minimum_request_timeout = ( + float(GatewayClient.KERNEL_LAUNCH_TIMEOUT) + self.launch_timeout_pad + ) + if self.request_timeout < minimum_request_timeout: + self.request_timeout = minimum_request_timeout + elif self.request_timeout > minimum_request_timeout: + GatewayClient.KERNEL_LAUNCH_TIMEOUT = int( + self.request_timeout - self.launch_timeout_pad + ) # Ensure any adjustments are reflected in env. 
os.environ["KERNEL_LAUNCH_TIMEOUT"] = str(GatewayClient.KERNEL_LAUNCH_TIMEOUT) - self._static_args["headers"] = json.loads(self.headers) - if "Authorization" not in self._static_args["headers"].keys(): - self._static_args["headers"].update( - {"Authorization": f"{self.auth_scheme} {self.auth_token}"} - ) - self._static_args["connect_timeout"] = self.connect_timeout - self._static_args["request_timeout"] = self.request_timeout - self._static_args["validate_cert"] = self.validate_cert + if self.headers: + self._connection_args["headers"] = json.loads(self.headers) + if self.auth_header_key not in self._connection_args["headers"]: + self._connection_args["headers"].update( + {f"{self.auth_header_key}": f"{self.auth_scheme} {self.auth_token}"} + ) + self._connection_args["connect_timeout"] = self.connect_timeout + self._connection_args["request_timeout"] = self.request_timeout + self._connection_args["validate_cert"] = self.validate_cert if self.client_cert: - self._static_args["client_cert"] = self.client_cert - self._static_args["client_key"] = self.client_key + self._connection_args["client_cert"] = self.client_cert + self._connection_args["client_key"] = self.client_key if self.ca_certs: - self._static_args["ca_certs"] = self.ca_certs + self._connection_args["ca_certs"] = self.ca_certs if self.http_user: - self._static_args["auth_username"] = self.http_user + self._connection_args["auth_username"] = self.http_user if self.http_pwd: - self._static_args["auth_password"] = self.http_pwd + self._connection_args["auth_password"] = self.http_pwd def load_connection_args(self, **kwargs): - """Merges the static args relative to the connection, with the given keyword arguments. If statics - have yet to be initialized, we'll do that here. + """Merges the static args relative to the connection, with the given keyword arguments. If static + args have yet to be initialized, we'll do that here. 
""" - if len(self._static_args) == 0: - self.init_static_args() + if len(self._connection_args) == 0: + self.init_connection_args() + + # Give token renewal a shot at renewing the token + prev_auth_token = self.auth_token + if self.auth_token is not None: + try: + self.auth_token = self.gateway_token_renewer.get_token( + self.auth_header_key, self.auth_scheme, self.auth_token + ) + except Exception as ex: + self.log.error( + f"An exception occurred attempting to renew the " + f"Gateway authorization token using an instance of class " + f"'{self.gateway_token_renewer_class}'. The request will " + f"proceed using the current token value. Exception was: {ex}" + ) + self.auth_token = prev_auth_token + + for arg, value in self._connection_args.items(): + if arg == "headers": + given_value = kwargs.setdefault(arg, {}) + if isinstance(given_value, dict): + given_value.update(value) + # Ensure the auth header is current + given_value.update( + {f"{self.auth_header_key}": f"{self.auth_scheme} {self.auth_token}"} + ) + else: + kwargs[arg] = value + + if self.accept_cookies: + self._update_cookie_header(kwargs) - kwargs.update(self._static_args) return kwargs + def update_cookies(self, cookie: SimpleCookie) -> None: + """Update cookies from existing requests for load balancers""" + if not self.accept_cookies: + return + + store_time = datetime.now(tz=timezone.utc) + for key, item in cookie.items(): + # Convert "expires" arg into "max-age" to facilitate expiration management. + # As "max-age" has precedence, ignore "expires" when "max-age" exists. 
+ if item.get("expires") and not item.get("max-age"): + expire_timedelta = parsedate_to_datetime(item["expires"]) - store_time + item["max-age"] = str(expire_timedelta.total_seconds()) + + self._cookies[key] = (item, store_time) + + def _clear_expired_cookies(self) -> None: + """Clear expired cookies.""" + check_time = datetime.now(tz=timezone.utc) + expired_keys = [] + + for key, (morsel, store_time) in self._cookies.items(): + cookie_max_age = morsel.get("max-age") + if not cookie_max_age: + continue + expired_timedelta = check_time - store_time + if expired_timedelta.total_seconds() > float(cookie_max_age): + expired_keys.append(key) + + for key in expired_keys: + self._cookies.pop(key) + + def _update_cookie_header(self, connection_args: dict[str, ty.Any]) -> None: + """Update a cookie header.""" + self._clear_expired_cookies() + + gateway_cookie_values = "; ".join( + f"{name}={morsel.coded_value}" for name, (morsel, _time) in self._cookies.items() + ) + if gateway_cookie_values: + headers = connection_args.get("headers", {}) + + # As headers are case-insensitive, we get existing name of cookie header, + # or use "Cookie" by default. + cookie_header_name = next( + (header_key for header_key in headers if header_key.lower() == "cookie"), + "Cookie", + ) + existing_cookie = headers.get(cookie_header_name) + + # merge gateway-managed cookies with cookies already in arguments + if existing_cookie: + gateway_cookie_values = existing_cookie + "; " + gateway_cookie_values + headers[cookie_header_name] = gateway_cookie_values + + connection_args["headers"] = headers + + +class RetryableHTTPClient: + """ + Inspired by urllib.util.Retry (https://urllib3.readthedocs.io/en/stable/reference/urllib3.util.html), + this class is initialized with desired retry characteristics, uses a recursive method `fetch()` against an instance + of `AsyncHTTPClient` which tracks the current retry count across applicable request retries. 
+ """ + + MAX_RETRIES_DEFAULT = 2 + MAX_RETRIES_CAP = 10 # The upper limit to max_retries value. + max_retries: int = int(os.getenv("JUPYTER_GATEWAY_MAX_REQUEST_RETRIES", MAX_RETRIES_DEFAULT)) + max_retries = max(0, min(max_retries, MAX_RETRIES_CAP)) # Enforce boundaries + retried_methods: set[str] = {"GET", "DELETE"} + retried_errors: set[int] = {502, 503, 504, 599} + retried_exceptions: set[type] = {ConnectionError} + backoff_factor: float = 0.1 + + def __init__(self): + """Initialize the retryable http client.""" + self.retry_count: int = 0 + self.client: AsyncHTTPClient = AsyncHTTPClient() + + async def fetch(self, endpoint: str, **kwargs: ty.Any) -> HTTPResponse: + """ + Retryable AsyncHTTPClient.fetch() method. When the request fails, this method will + recurse up to max_retries times if the condition deserves a retry. + """ + self.retry_count = 0 + return await self._fetch(endpoint, **kwargs) + + async def _fetch(self, endpoint: str, **kwargs: ty.Any) -> HTTPResponse: + """ + Performs the fetch against the contained AsyncHTTPClient instance and determines + if retry is necessary on any exceptions. If so, retry is performed recursively. + """ + try: + response: HTTPResponse = await self.client.fetch(endpoint, **kwargs) + except Exception as e: + is_retryable: bool = await self._is_retryable(kwargs["method"], e) + if not is_retryable: + raise e + logging.getLogger("ServerApp").info( + f"Attempting retry ({self.retry_count}) against " + f"endpoint '{endpoint}'. Retried error: '{e!r}'" + ) + response = await self._fetch(endpoint, **kwargs) + return response + + async def _is_retryable(self, method: str, exception: Exception) -> bool: + """Determines if the given exception is retryable based on object's configuration.""" + + if method not in self.retried_methods: + return False + if self.retry_count == self.max_retries: + return False + + # Determine if error is retryable... 
+ if isinstance(exception, HTTPClientError): + hce: HTTPClientError = exception + if hce.code not in self.retried_errors: + return False + elif not any(isinstance(exception, error) for error in self.retried_exceptions): + return False + + # Is retryable, wait for backoff, then increment count + await asyncio.sleep(self.backoff_factor * (2**self.retry_count)) + self.retry_count += 1 + return True + -async def gateway_request(endpoint, **kwargs): +async def gateway_request(endpoint: str, **kwargs: ty.Any) -> HTTPResponse: """Make an async request to kernel gateway endpoint, returns a response""" - client = AsyncHTTPClient() kwargs = GatewayClient.instance().load_connection_args(**kwargs) + rhc = RetryableHTTPClient() try: - response = await client.fetch(endpoint, **kwargs) + response = await rhc.fetch(endpoint, **kwargs) + GatewayClient.instance().emit( + data={STATUS_KEY: SUCCESS_STATUS, STATUS_CODE_KEY: 200, MESSAGE_KEY: "success"} + ) # Trap a set of common exceptions so that we can inform the user that their Gateway url is incorrect # or the server is not running. - # NOTE: We do this here since this handler is called during the Notebook's startup and subsequent refreshes + # NOTE: We do this here since this handler is called during the server's startup and subsequent refreshes # of the tree view. 
- except ConnectionRefusedError as e: + except HTTPClientError as e: + GatewayClient.instance().emit( + data={STATUS_KEY: ERROR_STATUS, STATUS_CODE_KEY: e.code, MESSAGE_KEY: str(e.message)} + ) + error_reason = f"Exception while attempting to connect to Gateway server url '{GatewayClient.instance().url}'" + error_message = e.message + if e.response: + try: + error_payload = json.loads(e.response.body) + error_reason = error_payload.get("reason") or error_reason + error_message = error_payload.get("message") or error_message + except json.decoder.JSONDecodeError: + error_reason = e.response.body.decode() + raise web.HTTPError( - 503, - "Connection refused from Gateway server url '{}'. " - "Check to be sure the Gateway instance is running.".format( - GatewayClient.instance().url - ), + e.code, + f"Error from Gateway: [{error_message}] {error_reason}. " + "Ensure gateway url is valid and the Gateway instance is running.", ) from e - except HTTPError as e: - # This can occur if the host is valid (e.g., foo.com) but there's nothing there. + except ConnectionError as e: + GatewayClient.instance().emit( + data={STATUS_KEY: ERROR_STATUS, STATUS_CODE_KEY: 503, MESSAGE_KEY: str(e)} + ) raise web.HTTPError( - e.code, - "Error attempting to connect to Gateway server url '{}'. " - "Ensure gateway url is valid and the Gateway instance is running.".format( - GatewayClient.instance().url - ), + 503, + f"ConnectionError was received from Gateway server url '{GatewayClient.instance().url}'. " + "Check to be sure the Gateway instance is running.", ) from e except gaierror as e: + GatewayClient.instance().emit( + data={STATUS_KEY: ERROR_STATUS, STATUS_CODE_KEY: 404, MESSAGE_KEY: str(e)} + ) raise web.HTTPError( 404, - "The Gateway server specified in the gateway_url '{}' doesn't appear to be valid. 
" - "Ensure gateway url is valid and the Gateway instance is running.".format( - GatewayClient.instance().url - ), + f"The Gateway server specified in the gateway_url '{GatewayClient.instance().url}' doesn't " + f"appear to be valid. Ensure gateway url is valid and the Gateway instance is running.", ) from e - + except Exception as e: + GatewayClient.instance().emit( + data={STATUS_KEY: ERROR_STATUS, STATUS_CODE_KEY: 505, MESSAGE_KEY: str(e)} + ) + logging.getLogger("ServerApp").error( + f"Exception while trying to launch kernel via Gateway URL {GatewayClient.instance().url} , {e}", + e, + ) + raise e + + if GatewayClient.instance().accept_cookies: + # Update cookies on GatewayClient from server if configured. + cookie_values = response.headers.get("Set-Cookie") + if cookie_values: + cookie: SimpleCookie = SimpleCookie() + cookie.load(cookie_values) + GatewayClient.instance().update_cookies(cookie) return response diff --git a/jupyter_server/gateway/handlers.py b/jupyter_server/gateway/handlers.py index a36f2d4faf..dcde4cd5ca 100644 --- a/jupyter_server/gateway/handlers.py +++ b/jupyter_server/gateway/handlers.py @@ -1,10 +1,15 @@ +"""Gateway API handlers.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
+from __future__ import annotations + import asyncio import logging import mimetypes import os import random +import warnings +from typing import Any, Optional, cast from jupyter_client.session import Session from tornado import web @@ -17,13 +22,21 @@ from ..base.handlers import APIHandler, JupyterHandler from ..utils import url_path_join -from .managers import GatewayClient +from .gateway_client import GatewayClient + +warnings.warn( + "The jupyter_server.gateway.handlers module is deprecated and will not be supported in Jupyter Server 3.0", + DeprecationWarning, + stacklevel=2, +) + # Keepalive ping interval (default: 30 seconds) -GATEWAY_WS_PING_INTERVAL_SECS = int(os.getenv("GATEWAY_WS_PING_INTERVAL_SECS", 30)) +GATEWAY_WS_PING_INTERVAL_SECS = int(os.getenv("GATEWAY_WS_PING_INTERVAL_SECS", "30")) class WebSocketChannelsHandler(WebSocketHandler, JupyterHandler): + """Gateway web socket channels handler.""" session = None gateway = None @@ -31,13 +44,14 @@ class WebSocketChannelsHandler(WebSocketHandler, JupyterHandler): ping_callback = None def check_origin(self, origin=None): + """Check origin for the socket.""" return JupyterHandler.check_origin(self, origin) def set_default_headers(self): """Undo the set_default_headers in JupyterHandler which doesn't make sense for websockets""" - pass def get_compression_options(self): + """Get the compression options for the socket.""" # use deflate compress websocket return {} @@ -48,28 +62,33 @@ def authenticate(self): the websocket finishes completing. 
""" # authenticate the request before opening the websocket - if self.get_current_user() is None: + if self.current_user is None: self.log.warning("Couldn't authenticate WebSocket connection") raise web.HTTPError(403) - if self.get_argument("session_id", False): - self.session.session = self.get_argument("session_id") + if self.get_argument("session_id", None): + assert self.session is not None + self.session.session = self.get_argument("session_id") # type:ignore[unreachable] else: self.log.warning("No session ID specified") def initialize(self): + """Initialize the socket.""" self.log.debug("Initializing websocket connection %s", self.request.path) self.session = Session(config=self.config) self.gateway = GatewayWebSocketClient(gateway_url=GatewayClient.instance().url) async def get(self, kernel_id, *args, **kwargs): + """Get the socket.""" self.authenticate() self.kernel_id = kernel_id - await super().get(kernel_id=kernel_id, *args, **kwargs) + kwargs["kernel_id"] = kernel_id + await super().get(*args, **kwargs) def send_ping(self): + """Send a ping to the socket.""" if self.ws_connection is None and self.ping_callback is not None: - self.ping_callback.stop() + self.ping_callback.stop() # type:ignore[unreachable] return self.ping(b"") @@ -79,6 +98,7 @@ def open(self, kernel_id, *args, **kwargs): self.ping_callback = PeriodicCallback(self.send_ping, GATEWAY_WS_PING_INTERVAL_SECS * 1000) self.ping_callback.start() + assert self.gateway is not None self.gateway.on_open( kernel_id=kernel_id, message_callback=self.write_message, @@ -87,6 +107,7 @@ def open(self, kernel_id, *args, **kwargs): def on_message(self, message): """Forward message to gateway web socket handler.""" + assert self.gateway is not None self.gateway.on_message(message) def write_message(self, message, binary=False): @@ -98,18 +119,19 @@ def write_message(self, message, binary=False): elif self.log.isEnabledFor(logging.DEBUG): msg_summary = 
WebSocketChannelsHandler._get_message_summary(json_decode(utf8(message))) self.log.debug( - "Notebook client closed websocket connection - message dropped: {}".format( - msg_summary - ) + f"Notebook client closed websocket connection - message dropped: {msg_summary}" ) def on_close(self): + """Handle a closing socket.""" self.log.debug("Closing websocket connection %s", self.request.path) + assert self.gateway is not None self.gateway.on_close() super().on_close() @staticmethod def _get_message_summary(message): + """Get a summary of a message.""" summary = [] message_type = message["msg_type"] summary.append(f"type: {message_type}") @@ -134,35 +156,41 @@ class GatewayWebSocketClient(LoggingConfigurable): """Proxy web socket connection to a kernel/enterprise gateway.""" def __init__(self, **kwargs): - super().__init__(**kwargs) + """Initialize the gateway web socket client.""" + super().__init__() self.kernel_id = None self.ws = None - self.ws_future = Future() + self.ws_future: Future[Any] = Future() self.disconnected = False self.retry = 0 async def _connect(self, kernel_id, message_callback): + """Connect to the socket.""" # websocket is initialized before connection self.ws = None self.kernel_id = kernel_id + client = GatewayClient.instance() + assert client.ws_url is not None + ws_url = url_path_join( - GatewayClient.instance().ws_url, - GatewayClient.instance().kernels_endpoint, + client.ws_url, + client.kernels_endpoint, url_escape(kernel_id), "channels", ) self.log.info(f"Connecting to {ws_url}") - kwargs = {} - kwargs = GatewayClient.instance().load_connection_args(**kwargs) + kwargs: dict[str, Any] = {} + kwargs = client.load_connection_args(**kwargs) request = HTTPRequest(ws_url, **kwargs) - self.ws_future = websocket_connect(request) + self.ws_future = cast("Future[Any]", websocket_connect(request)) self.ws_future.add_done_callback(self._connection_done) loop = IOLoop.current() loop.add_future(self.ws_future, lambda future: 
self._read_messages(message_callback)) def _connection_done(self, fut): + """Handle a finished connection.""" if ( not self.disconnected and fut.exception() is None ): # prevent concurrent.futures._base.CancelledError @@ -178,6 +206,7 @@ def _connection_done(self, fut): ) def _disconnect(self): + """Handle a disconnect.""" self.disconnected = True if self.ws is not None: # Close connection @@ -261,16 +290,20 @@ class GatewayResourceHandler(APIHandler): @web.authenticated async def get(self, kernel_name, path, include_body=True): + """Get a gateway resource by name and path.""" + mimetype: Optional[str] = None ksm = self.kernel_spec_manager - kernel_spec_res = await ksm.get_kernel_spec_resource(kernel_name, path) + kernel_spec_res = await ksm.get_kernel_spec_resource( # type:ignore[attr-defined] + kernel_name, path + ) if kernel_spec_res is None: self.log.warning( "Kernelspec resource '{}' for '{}' not found. Gateway may not support" " resource serving.".format(path, kernel_name) ) else: - self.set_header("Content-Type", mimetypes.guess_type(path)[0]) - self.finish(kernel_spec_res) + mimetype = mimetypes.guess_type(path)[0] or "text/plain" + self.finish(kernel_spec_res, set_content_type=mimetype) from ..services.kernels.handlers import _kernel_id_regex diff --git a/jupyter_server/gateway/managers.py b/jupyter_server/gateway/managers.py index 4645429cf1..cd0b27b50d 100644 --- a/jupyter_server/gateway/managers.py +++ b/jupyter_server/gateway/managers.py @@ -1,27 +1,36 @@ +"""Kernel gateway managers.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
+from __future__ import annotations + +import asyncio import datetime import json import os from logging import Logger -from queue import Queue +from queue import Empty, Queue from threading import Thread -from typing import Dict +from time import monotonic +from typing import Any, Optional, cast import websocket from jupyter_client.asynchronous.client import AsyncKernelClient from jupyter_client.clientabc import KernelClientABC from jupyter_client.kernelspec import KernelSpecManager -from jupyter_client.manager import AsyncKernelManager from jupyter_client.managerabc import KernelManagerABC +from jupyter_core.utils import ensure_async from tornado import web from tornado.escape import json_decode, json_encode, url_escape, utf8 from traitlets import DottedObjectName, Instance, Type, default -from .._tz import UTC -from ..services.kernels.kernelmanager import AsyncMappingKernelManager +from .._tz import UTC, utcnow +from ..services.kernels.kernelmanager import ( + AsyncMappingKernelManager, + ServerKernelManager, + emit_kernel_action_event, +) from ..services.sessions.sessionmanager import SessionManager -from ..utils import ensure_async, url_path_join +from ..utils import url_path_join from .gateway_client import GatewayClient, gateway_request @@ -29,7 +38,7 @@ class GatewayMappingKernelManager(AsyncMappingKernelManager): """Kernel manager that supports remote kernels hosted by Jupyter Kernel or Enterprise Gateway.""" # We'll maintain our own set of kernel ids - _kernels: Dict[str, "GatewayKernelManager"] = {} + _kernels: dict[str, GatewayKernelManager] = {} # type:ignore[assignment] @default("kernel_manager_class") def _default_kernel_manager_class(self): @@ -40,9 +49,10 @@ def _default_shared_context(self): return False # no need to share zmq contexts def __init__(self, **kwargs): + """Initialize a gateway mapping kernel manager.""" super().__init__(**kwargs) self.kernels_url = url_path_join( - GatewayClient.instance().url, 
GatewayClient.instance().kernels_endpoint + GatewayClient.instance().url or "", GatewayClient.instance().kernels_endpoint or "" ) def remove_kernel(self, kernel_id): @@ -52,7 +62,7 @@ def remove_kernel(self, kernel_id): except KeyError: pass - async def start_kernel(self, kernel_id=None, path=None, **kwargs): + async def start_kernel(self, *, kernel_id=None, path=None, **kwargs): """Start a kernel for a session and return its kernel_id. Parameters @@ -67,15 +77,13 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): """ self.log.info(f"Request start kernel: kernel_id={kernel_id}, path='{path}'") - if kernel_id is None: - if path is not None: - kwargs["cwd"] = self.cwd_for_path(path) + if kernel_id is None and path is not None: + kwargs["cwd"] = self.cwd_for_path(path) km = self.kernel_manager_factory(parent=self, log=self.log) await km.start_kernel(kernel_id=kernel_id, **kwargs) kernel_id = km.kernel_id self._kernels[kernel_id] = km - # Initialize culling if not already if not self._initialized_culler: self.initialize_culler() @@ -92,9 +100,9 @@ async def kernel_model(self, kernel_id): The uuid of the kernel. """ model = None - km = self.get_kernel(kernel_id) - if km: - model = km.kernel + km = self.get_kernel(str(kernel_id)) + if km: # type:ignore[truthy-bool] + model = km.kernel # type:ignore[attr-defined] return model async def list_kernels(self, **kwargs): @@ -119,11 +127,34 @@ async def list_kernels(self, **kwargs): culled_ids = [] for kid, _ in our_kernels.items(): if kid not in kernel_models: + # The upstream kernel was not reported in the list of kernels. self.log.warning( - f"Kernel {kid} no longer active - probably culled on Gateway server." + f"Kernel {kid} not present in the list of kernels - possibly culled on Gateway server." ) - self._kernels.pop(kid, None) - culled_ids.append(kid) # TODO: Figure out what do with these. 
+ try: + # Try to directly refresh the model for this specific kernel in case + # the upstream list of kernels was erroneously incomplete. + # + # That might happen if the case of a proxy that manages multiple + # backends where there could be transient connectivity issues with + # a single backend. + # + # Alternatively, it could happen if there is simply a bug in the + # upstream gateway server. + # + # Either way, including this check improves our reliability in the + # face of such scenarios. + model = await self._kernels[kid].refresh_model() + except web.HTTPError: + model = None + if model: + kernel_models[kid] = model + else: + self.log.warning( + f"Kernel {kid} no longer active - probably culled on Gateway server." + ) + self._kernels.pop(kid, None) + culled_ids.append(kid) # TODO: Figure out what do with these. return list(kernel_models.values()) async def shutdown_kernel(self, kernel_id, now=False, restart=False): @@ -139,7 +170,7 @@ async def shutdown_kernel(self, kernel_id, now=False, restart=False): The purpose of this shutdown is to restart the kernel (True) """ km = self.get_kernel(kernel_id) - await km.shutdown_kernel(now=now, restart=restart) + await ensure_async(km.shutdown_kernel(now=now, restart=restart)) self.remove_kernel(kernel_id) async def restart_kernel(self, kernel_id, now=False, **kwargs): @@ -151,7 +182,7 @@ async def restart_kernel(self, kernel_id, now=False, **kwargs): The id of the kernel to restart. """ km = self.get_kernel(kernel_id) - await km.restart_kernel(now=now, **kwargs) + await ensure_async(km.restart_kernel(now=now, **kwargs)) async def interrupt_kernel(self, kernel_id, **kwargs): """Interrupt a kernel by its kernel uuid. @@ -162,44 +193,73 @@ async def interrupt_kernel(self, kernel_id, **kwargs): The id of the kernel to interrupt. 
""" km = self.get_kernel(kernel_id) - await km.interrupt_kernel() + await ensure_async(km.interrupt_kernel()) async def shutdown_all(self, now=False): """Shutdown all kernels.""" - for kernel_id in self._kernels: + kids = list(self._kernels) + for kernel_id in kids: km = self.get_kernel(kernel_id) - await km.shutdown_kernel(now=now) + await ensure_async(km.shutdown_kernel(now=now)) self.remove_kernel(kernel_id) async def cull_kernels(self): - """Override cull_kernels so we can be sure their state is current.""" + """Override cull_kernels, so we can be sure their state is current.""" await self.list_kernels() await super().cull_kernels() class GatewayKernelSpecManager(KernelSpecManager): + """A gateway kernel spec manager.""" + def __init__(self, **kwargs): + """Initialize a gateway kernel spec manager.""" super().__init__(**kwargs) base_endpoint = url_path_join( - GatewayClient.instance().url, GatewayClient.instance().kernelspecs_endpoint + GatewayClient.instance().url or "", GatewayClient.instance().kernelspecs_endpoint ) self.base_endpoint = GatewayKernelSpecManager._get_endpoint_for_user_filter(base_endpoint) self.base_resource_endpoint = url_path_join( - GatewayClient.instance().url, + GatewayClient.instance().url or "", GatewayClient.instance().kernelspecs_resource_endpoint, ) @staticmethod def _get_endpoint_for_user_filter(default_endpoint): + """Get the endpoint for a user filter.""" kernel_user = os.environ.get("KERNEL_USERNAME") if kernel_user: return "?user=".join([default_endpoint, kernel_user]) return default_endpoint + def _replace_path_kernelspec_resources(self, kernel_specs): + """Helper method that replaces any gateway base_url with the server's base_url + This enables clients to properly route through jupyter_server to a gateway + for kernel resources such as logo files + """ + if not self.parent: + return {} + kernelspecs = kernel_specs["kernelspecs"] + for kernel_name in kernelspecs: + resources = kernelspecs[kernel_name]["resources"] + for 
resource_name in resources: + original_path = resources[resource_name] + split_eg_base_url = str.rsplit(original_path, sep="/kernelspecs/", maxsplit=1) + if len(split_eg_base_url) > 1: + new_path = url_path_join( + self.parent.base_url, "kernelspecs", split_eg_base_url[1] + ) + kernel_specs["kernelspecs"][kernel_name]["resources"][resource_name] = new_path + if original_path != new_path: + self.log.debug( + f"Replaced original kernel resource path {original_path} with new " + f"path {kernel_specs['kernelspecs'][kernel_name]['resources'][resource_name]}" + ) + return kernel_specs + def _get_kernelspecs_endpoint_url(self, kernel_name=None): """Builds a url for the kernels endpoint - Parameters ---------- kernel_name : kernel name (optional) @@ -210,12 +270,15 @@ def _get_kernelspecs_endpoint_url(self, kernel_name=None): return self.base_endpoint async def get_all_specs(self): + """Get all of the kernel specs for the gateway.""" fetched_kspecs = await self.list_kernel_specs() # get the default kernel name and compare to that of this server. # If different log a warning and reset the default. However, the # caller of this method will still return this server's value until # the next fetch of kernelspecs - at which time they'll match. 
+ if not self.parent: + return {} km = self.parent.kernel_manager remote_default_kernel_name = fetched_kspecs.get("default") if remote_default_kernel_name != km.default_kernel_name: @@ -234,6 +297,7 @@ async def list_kernel_specs(self): self.log.debug(f"Request list kernel specs at: {kernel_spec_url}") response = await gateway_request(kernel_spec_url, method="GET") kernel_specs = json_decode(response.body) + kernel_specs = self._replace_path_kernelspec_resources(kernel_specs) return kernel_specs async def get_kernel_spec(self, kernel_name, **kwargs): @@ -252,12 +316,8 @@ async def get_kernel_spec(self, kernel_name, **kwargs): if error.status_code == 404: # Convert not found to KeyError since that's what the Notebook handler expects # message is not used, but might as well make it useful for troubleshooting - raise KeyError( - "kernelspec {kernel_name} not found on Gateway server at: {gateway_url}".format( - kernel_name=kernel_name, - gateway_url=GatewayClient.instance().url, - ) - ) from error + msg = f"kernelspec {kernel_name} not found on Gateway server at: {GatewayClient.instance().url}" + raise KeyError(msg) from None else: raise else: @@ -292,26 +352,32 @@ async def get_kernel_spec_resource(self, kernel_name, path): class GatewaySessionManager(SessionManager): + """A gateway session manager.""" + kernel_manager = Instance("jupyter_server.gateway.managers.GatewayMappingKernelManager") - async def kernel_culled(self, kernel_id): - """Checks if the kernel is still considered alive and returns true if its not found.""" - kernel = None + async def kernel_culled(self, kernel_id: str) -> bool: # typing: ignore + """Checks if the kernel is still considered alive and returns true if it's not found.""" + km: Optional[GatewayKernelManager] = None try: + # Since we keep the models up-to-date via client polling, use that state to determine + # if this kernel no longer exists on the gateway server rather than perform a redundant + # fetch operation - especially since this 
is called at approximately the same interval. + # This has the effect of reducing GET /api/kernels requests against the gateway server + # by 50%! + # Note that should the redundant polling be consolidated, or replaced with an event-based + # notification model, this will need to be revisited. km = self.kernel_manager.get_kernel(kernel_id) - kernel = await km.refresh_model() - except Exception: # Let exceptions here reflect culled kernel + except Exception: + # Let exceptions here reflect culled kernel pass - return kernel is None + return km is None -"""KernelManager class to manage a kernel running on a Gateway Server via the REST API""" - - -class GatewayKernelManager(AsyncKernelManager): +class GatewayKernelManager(ServerKernelManager): """Manages a single kernel remotely via a Gateway Server.""" - kernel_id = None + kernel_id: Optional[str] = None # type:ignore[assignment] kernel = None @default("cache_ports") @@ -319,13 +385,16 @@ def _default_cache_ports(self): return False # no need to cache ports here def __init__(self, **kwargs): + """Initialize the gateway kernel manager.""" super().__init__(**kwargs) self.kernels_url = url_path_join( - GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint + GatewayClient.instance().url or "", GatewayClient.instance().kernels_endpoint ) - self.kernel_url = self.kernel = self.kernel_id = None + self.kernel_url: str + self.kernel = self.kernel_id = None # simulate busy/activity markers: - self.execution_state = self.last_activity = None + self.execution_state = "starting" + self.last_activity = utcnow() @property def has_kernel(self): @@ -341,13 +410,13 @@ def has_kernel(self): def client(self, **kwargs): """Create a client configured to connect to our kernel""" - kw = {} + kw: dict[str, Any] = {} kw.update(self.get_connection_info(session=True)) kw.update( - dict( - connection_file=self.connection_file, - parent=self, - ) + { + "connection_file": self.connection_file, + "parent": self, + } ) 
kw["kernel_id"] = self.kernel_id @@ -387,9 +456,9 @@ async def refresh_model(self, model=None): if isinstance(self.parent, AsyncMappingKernelManager): # Update connections only if there's a mapping kernel manager parent for # this kernel manager. The current kernel manager instance may not have - # an parent instance if, say, a server extension is using another application + # a parent instance if, say, a server extension is using another application # (e.g., papermill) that uses a KernelManager instance directly. - self.parent._kernel_connections[self.kernel_id] = int(model["connections"]) + self.parent._kernel_connections[self.kernel_id] = int(model["connections"]) # type:ignore[index] self.kernel = model return model @@ -398,6 +467,9 @@ async def refresh_model(self, model=None): # Kernel management # -------------------------------------------------------------------------- + @emit_kernel_action_event( + success_msg="Kernel {kernel_id} was started.", + ) async def start_kernel(self, **kwargs): """Starts a kernel via HTTP in an asynchronous manner. @@ -415,24 +487,30 @@ async def start_kernel(self, **kwargs): # Let KERNEL_USERNAME take precedent over http_user config option. 
if os.environ.get("KERNEL_USERNAME") is None and GatewayClient.instance().http_user: - os.environ["KERNEL_USERNAME"] = GatewayClient.instance().http_user + os.environ["KERNEL_USERNAME"] = GatewayClient.instance().http_user or "" + + payload_envs = os.environ.copy() + payload_envs.update(kwargs.get("env", {})) # Add any env entries in this request + # Build the actual env payload, filtering allowed_envs and those starting with 'KERNEL_' kernel_env = { k: v - for (k, v) in dict(os.environ).items() - if k.startswith("KERNEL_") or k in GatewayClient.instance().env_whitelist.split(",") + for (k, v) in payload_envs.items() + if k.startswith("KERNEL_") or k in GatewayClient.instance().allowed_envs.split(",") } - # Add any env entries in this request - kernel_env.update(kwargs.get("env", {})) - # Convey the full path to where this notebook file is located. if kwargs.get("cwd") is not None and kernel_env.get("KERNEL_WORKING_DIR") is None: kernel_env["KERNEL_WORKING_DIR"] = kwargs["cwd"] json_body = json_encode({"name": kernel_name, "env": kernel_env}) - response = await gateway_request(self.kernels_url, method="POST", body=json_body) + response = await gateway_request( + self.kernels_url, + method="POST", + headers={"Content-Type": "application/json"}, + body=json_body, + ) self.kernel = json_decode(response.body) self.kernel_id = self.kernel["id"] self.kernel_url = url_path_join(self.kernels_url, url_escape(str(self.kernel_id))) @@ -443,28 +521,55 @@ async def start_kernel(self, **kwargs): self.kernel = await self.refresh_model() self.log.info(f"GatewayKernelManager using existing kernel: {self.kernel_id}") + @emit_kernel_action_event( + success_msg="Kernel {kernel_id} was shutdown.", + ) async def shutdown_kernel(self, now=False, restart=False): """Attempts to stop the kernel process cleanly via HTTP.""" if self.has_kernel: self.log.debug("Request shutdown kernel at: %s", self.kernel_url) - response = await gateway_request(self.kernel_url, method="DELETE") - 
self.log.debug("Shutdown kernel response: %d %s", response.code, response.reason) + try: + response = await gateway_request(self.kernel_url, method="DELETE") + self.log.debug("Shutdown kernel response: %d %s", response.code, response.reason) + except web.HTTPError as error: + if error.status_code == 404: + self.log.debug("Shutdown kernel response: kernel not found (ignored)") + else: + raise + @emit_kernel_action_event( + success_msg="Kernel {kernel_id} was restarted.", + ) async def restart_kernel(self, **kw): """Restarts a kernel via HTTP.""" if self.has_kernel: + assert self.kernel_url is not None kernel_url = self.kernel_url + "/restart" self.log.debug("Request restart kernel at: %s", kernel_url) - response = await gateway_request(kernel_url, method="POST", body=json_encode({})) + response = await gateway_request( + kernel_url, + method="POST", + headers={"Content-Type": "application/json"}, + body=json_encode({}), + ) self.log.debug("Restart kernel response: %d %s", response.code, response.reason) + @emit_kernel_action_event( + success_msg="Kernel {kernel_id} was interrupted.", + ) async def interrupt_kernel(self): """Interrupts the kernel via an HTTP request.""" if self.has_kernel: + assert self.kernel_url is not None kernel_url = self.kernel_url + "/interrupt" self.log.debug("Request interrupt kernel at: %s", kernel_url) - response = await gateway_request(kernel_url, method="POST", body=json_encode({})) + response = await gateway_request( + kernel_url, + method="POST", + headers={"Content-Type": "application/json"}, + body=json_encode({}), + ) self.log.debug("Interrupt kernel response: %d %s", response.code, response.reason) async def is_alive(self): @@ -472,40 +577,67 @@ async def is_alive(self): if self.has_kernel: # Go ahead and issue a request to get the kernel self.kernel = await self.refresh_model() + self.log.debug(f"The kernel: {self.kernel} is alive.") return True else: # we don't have a kernel + self.log.debug(f"The kernel: {self.kernel} no longer 
exists.") return False def cleanup_resources(self, restart=False): """Clean up resources when the kernel is shut down""" - pass KernelManagerABC.register(GatewayKernelManager) -class ChannelQueue(Queue): +class ChannelQueue(Queue): # type:ignore[type-arg] + """A queue for a named channel.""" - channel_name: str = None + channel_name: Optional[str] = None + response_router_finished: bool def __init__(self, channel_name: str, channel_socket: websocket.WebSocket, log: Logger): + """Initialize a channel queue.""" super().__init__() self.channel_name = channel_name self.channel_socket = channel_socket self.log = log + self.response_router_finished = False + + async def _async_get(self, timeout=None): + """Asynchronously get from the queue.""" + if timeout is None: + timeout = float("inf") + elif timeout < 0: + msg = "'timeout' must be a non-negative number" + raise ValueError(msg) + end_time = monotonic() + timeout + + while True: + try: + return self.get(block=False) + except Empty: + if self.response_router_finished: + msg = "Response router had finished" + raise RuntimeError(msg) from None + if monotonic() > end_time: + raise + await asyncio.sleep(0) - async def get_msg(self, *args, **kwargs) -> dict: + async def get_msg(self, *args: Any, **kwargs: Any) -> dict[str, Any]: + """Get a message from the queue.""" timeout = kwargs.get("timeout", 1) - msg = self.get(timeout=timeout) + msg = await self._async_get(timeout=timeout) self.log.debug( "Received message on channel: {}, msg_id: {}, msg_type: {}".format( self.channel_name, msg["msg_id"], msg["msg_type"] if msg else "null" ) ) self.task_done() - return msg + return cast("dict[str, Any]", msg) - def send(self, msg: dict) -> None: + def send(self, msg: dict[str, Any]) -> None: + """Send a message to the queue.""" message = json.dumps(msg, default=ChannelQueue.serialize_datetime).replace(" None: @staticmethod def serialize_datetime(dt): - if isinstance(dt, (datetime.date, datetime.datetime)): + """Serialize a datetime 
object.""" + if isinstance(dt, datetime.datetime): return dt.timestamp() return None def start(self) -> None: - pass + """Start the queue.""" def stop(self) -> None: + """Stop the queue.""" if not self.empty(): # If unprocessed messages are detected, drain the queue collecting non-status # messages. If any remain that are not 'shutdown_reply' and this is not iopub @@ -543,11 +677,15 @@ def stop(self) -> None: ) def is_alive(self) -> bool: + """Whether the queue is alive.""" return self.channel_socket is not None class HBChannelQueue(ChannelQueue): + """A queue for the heartbeat channel.""" + def is_beating(self) -> bool: + """Whether the channel is beating.""" # Just use the is_alive status for now return self.is_alive() @@ -571,14 +709,22 @@ class GatewayKernelClient(AsyncKernelClient): # flag for whether execute requests should be allowed to call raw_input: allow_stdin = False - _channels_stopped = False - _channel_queues = {} - - def __init__(self, **kwargs): + _channels_stopped: bool + _channel_queues: Optional[dict[str, ChannelQueue]] + _control_channel: Optional[ChannelQueue] # type:ignore[assignment] + _hb_channel: Optional[ChannelQueue] # type:ignore[assignment] + _stdin_channel: Optional[ChannelQueue] # type:ignore[assignment] + _iopub_channel: Optional[ChannelQueue] # type:ignore[assignment] + _shell_channel: Optional[ChannelQueue] # type:ignore[assignment] + + def __init__(self, kernel_id, **kwargs): + """Initialize a gateway kernel client.""" super().__init__(**kwargs) - self.kernel_id = kwargs["kernel_id"] - self.channel_socket = None - self.response_router = None + self.kernel_id = kernel_id + self.channel_socket: Optional[websocket.WebSocket] = None + self.response_router: Optional[Thread] = None + self._channels_stopped = False + self._channel_queues = {} # -------------------------------------------------------------------------- # Channel management methods @@ -588,21 +734,22 @@ async def start_channels(self, shell=True, iopub=True, stdin=True, 
hb=True, cont """Starts the channels for this kernel. For this class, we establish a websocket connection to the destination - and setup the channel-based queues on which applicable messages will + and set up the channel-based queues on which applicable messages will be posted. """ ws_url = url_path_join( - GatewayClient.instance().ws_url, + GatewayClient.instance().ws_url or "", GatewayClient.instance().kernels_endpoint, url_escape(self.kernel_id), "channels", ) # Gather cert info in case where ssl is desired... - ssl_options = {} - ssl_options["ca_certs"] = GatewayClient.instance().ca_certs - ssl_options["certfile"] = GatewayClient.instance().client_cert - ssl_options["keyfile"] = GatewayClient.instance().client_key + ssl_options = { + "ca_certs": GatewayClient.instance().ca_certs, + "certfile": GatewayClient.instance().client_cert, + "keyfile": GatewayClient.instance().client_key, + } self.channel_socket = websocket.create_connection( ws_url, @@ -610,13 +757,14 @@ async def start_channels(self, shell=True, iopub=True, stdin=True, hb=True, cont enable_multithread=True, sslopt=ssl_options, ) - self.response_router = Thread(target=self._route_responses) - self.response_router.start() await ensure_async( super().start_channels(shell=shell, iopub=iopub, stdin=stdin, hb=hb, control=control) ) + self.response_router = Thread(target=self._route_responses) + self.response_router.start() + def stop_channels(self): """Stops all the running channels for this kernel. 
@@ -627,7 +775,9 @@ def stop_channels(self): self._channels_stopped = True self.log.debug("Closing websocket connection") + assert self.channel_socket is not None self.channel_socket.close() + assert self.response_router is not None self.response_router.join() if self._channel_queues: @@ -641,7 +791,9 @@ def shell_channel(self): """Get the shell channel object for this kernel.""" if self._shell_channel is None: self.log.debug("creating shell channel queue") + assert self.channel_socket is not None self._shell_channel = ChannelQueue("shell", self.channel_socket, self.log) + assert self._channel_queues is not None self._channel_queues["shell"] = self._shell_channel return self._shell_channel @@ -650,7 +802,9 @@ def iopub_channel(self): """Get the iopub channel object for this kernel.""" if self._iopub_channel is None: self.log.debug("creating iopub channel queue") + assert self.channel_socket is not None self._iopub_channel = ChannelQueue("iopub", self.channel_socket, self.log) + assert self._channel_queues is not None self._channel_queues["iopub"] = self._iopub_channel return self._iopub_channel @@ -659,7 +813,9 @@ def stdin_channel(self): """Get the stdin channel object for this kernel.""" if self._stdin_channel is None: self.log.debug("creating stdin channel queue") + assert self.channel_socket is not None self._stdin_channel = ChannelQueue("stdin", self.channel_socket, self.log) + assert self._channel_queues is not None self._channel_queues["stdin"] = self._stdin_channel return self._stdin_channel @@ -668,7 +824,9 @@ def hb_channel(self): """Get the hb channel object for this kernel.""" if self._hb_channel is None: self.log.debug("creating hb channel queue") + assert self.channel_socket is not None self._hb_channel = HBChannelQueue("hb", self.channel_socket, self.log) + assert self._channel_queues is not None self._channel_queues["hb"] = self._hb_channel return self._hb_channel @@ -677,7 +835,9 @@ def control_channel(self): """Get the control channel object for 
this kernel.""" if self._control_channel is None: self.log.debug("creating control channel queue") + assert self.channel_socket is not None self._control_channel = ChannelQueue("control", self.channel_socket, self.log) + assert self._channel_queues is not None self._channel_queues["control"] = self._control_channel return self._control_channel @@ -691,20 +851,27 @@ def _route_responses(self): """ try: while not self._channels_stopped: + assert self.channel_socket is not None raw_message = self.channel_socket.recv() if not raw_message: break response_message = json_decode(utf8(raw_message)) channel = response_message["channel"] + assert self._channel_queues is not None self._channel_queues[channel].put_nowait(response_message) except websocket.WebSocketConnectionClosedException: - pass # websocket closure most likely due to shutdown + pass # websocket closure most likely due to shut down except BaseException as be: if not self._channels_stopped: self.log.warning(f"Unexpected exception encountered ({be})") + # Notify channel queues that this thread has finished and no more messages will be received + assert self._channel_queues is not None + for channel_queue in self._channel_queues.values(): + channel_queue.response_router_finished = True + self.log.debug("Response router thread exiting...") diff --git a/jupyter_server/i18n/README.md b/jupyter_server/i18n/README.md index 17a475ce47..28562c2748 100644 --- a/jupyter_server/i18n/README.md +++ b/jupyter_server/i18n/README.md @@ -29,7 +29,9 @@ if running Ubuntu 14, you should set environment variable `LANGUAGE="xx_XX"`.
**All i18n-related commands are done from the related directory :** - cd notebook/i18n/ +``` +cd notebook/i18n/ +``` ### Message extraction @@ -69,7 +71,9 @@ pybabel compile -D nbui -f -l ${LANG} -i ${LANG}/LC_MESSAGES/nbui.po -o ${LANG}/ _nbjs.po_ needs to be converted to JSON for use within the JavaScript code, with _po2json_, as follows: - po2json -p -F -f jed1.x -d nbjs ${LANG}/LC_MESSAGES/nbjs.po ${LANG}/LC_MESSAGES/nbjs.json +``` +po2json -p -F -f jed1.x -d nbjs ${LANG}/LC_MESSAGES/nbjs.po ${LANG}/LC_MESSAGES/nbjs.json +``` When new languages get added, their language codes should be added to _notebook/i18n/nbjs.json_ under the `supported_languages` element. @@ -111,21 +115,25 @@ to handle these cases properly. ### Known issues and future evolutions -1. Right now there are two different places where the desired language is set. At startup time, the Jupyter console's messages pay attention to the setting of the `${LANG}` environment variable - as set in the shell at startup time. Unfortunately, this is also the time where the Jinja2 - environment is set up, which means that the template stuff will always come from this setting. - We really want to be paying attention to the browser's settings for the stuff that happens in the - browser, so we need to be able to retrieve this information after the browser is started and somehow - communicate this back to Jinja2. So far, I haven't yet figured out how to do this, which means that if the ${LANG} at startup doesn't match the browser's settings, you could potentially get a mix - of languages in the UI ( never a good thing ). - -2. We will need to decide if console messages should be translatable, and enable them if desired. -3. The keyboard shortcut editor was implemented after the i18n work was completed, so that portion - does not have translation support at this time. -4. 
Babel's documentation has instructions on how to integrate messages extraction - into your _setup.py_ so that eventually we can just do: - - ./setup.py extract_messages +1. Right now there are two different places where the desired language is set. At startup time, the Jupyter console's messages pay attention to the setting of the `${LANG}` environment variable + as set in the shell at startup time. Unfortunately, this is also the time where the Jinja2 + environment is set up, which means that the template stuff will always come from this setting. + We really want to be paying attention to the browser's settings for the stuff that happens in the + browser, so we need to be able to retrieve this information after the browser is started and somehow + communicate this back to Jinja2. So far, I haven't yet figured out how to do this, which means that if the ${LANG} at startup doesn't match the browser's settings, you could potentially get a mix + of languages in the UI ( never a good thing ). + +1. We will need to decide if console messages should be translatable, and enable them if desired. + +1. The keyboard shortcut editor was implemented after the i18n work was completed, so that portion + does not have translation support at this time. + +1. Babel's documentation has instructions on how to integrate messages extraction + into your _setup.py_ so that eventually we can just do: + + ``` + ./setup.py extract_messages + ``` I hope to get this working at some point in the near future. 5. The conversions from `.po` to `.mo` probably can and should be done using `setup.py install`. 
diff --git a/jupyter_server/i18n/__init__.py b/jupyter_server/i18n/__init__.py index e44aa11393..896f41c57c 100644 --- a/jupyter_server/i18n/__init__.py +++ b/jupyter_server/i18n/__init__.py @@ -1,11 +1,14 @@ """Server functions for loading translations """ +from __future__ import annotations + import errno import json import re from collections import defaultdict from os.path import dirname from os.path import join as pjoin +from typing import Any I18N_DIR = dirname(__file__) # Cache structure: @@ -15,7 +18,7 @@ # ... # } # }} -TRANSLATIONS_CACHE = {"nbjs": {}} +TRANSLATIONS_CACHE: dict[str, Any] = {"nbjs": {}} _accept_lang_re = re.compile( @@ -42,10 +45,7 @@ def parse_accept_lang_header(accept_lang): lang, qvalue = m.group("lang", "qvalue") # Browser header format is zh-CN, gettext uses zh_CN lang = lang.replace("-", "_") - if qvalue is None: - qvalue = 1.0 - else: - qvalue = float(qvalue) + qvalue = 1.0 if qvalue is None else float(qvalue) if qvalue == 0: continue # 0 means not accepted by_q[qvalue].append(lang) @@ -59,7 +59,7 @@ def parse_accept_lang_header(accept_lang): def load(language, domain="nbjs"): """Load translations from an nbjs.json file""" try: - f = open(pjoin(I18N_DIR, language, "LC_MESSAGES", "nbjs.json"), encoding="utf-8") + f = open(pjoin(I18N_DIR, language, "LC_MESSAGES", "nbjs.json"), encoding="utf-8") # noqa: SIM115 except OSError as e: if e.errno != errno.ENOENT: raise @@ -87,7 +87,7 @@ def combine_translations(accept_language, domain="nbjs"): Returns data re-packaged in jed1.x format. """ lang_codes = parse_accept_lang_header(accept_language) - combined = {} + combined: dict[str, Any] = {} for language in lang_codes: if language == "en": # en is default, all translations are in frontend. 
diff --git a/jupyter_server/i18n/notebook.pot b/jupyter_server/i18n/notebook.pot index 333b40d76c..b8d588f964 100644 --- a/jupyter_server/i18n/notebook.pot +++ b/jupyter_server/i18n/notebook.pot @@ -280,7 +280,7 @@ msgid "server_extensions is deprecated, use jpserver_extensions" msgstr "" #: jupyter_server/serverapp.py:1040 -msgid "Dict of Python modules to load as notebook server extensions. Entry values can be used to enable and disable the loading ofthe extensions. The extensions will be loaded in alphabetical order." +msgid "Dict of Python modules to load as notebook server extensions. Entry values can be used to enable and disable the loading of the extensions. The extensions will be loaded in alphabetical order." msgstr "" #: jupyter_server/serverapp.py:1049 diff --git a/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po b/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po index ee74a2097c..8f65bd35bf 100644 --- a/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po +++ b/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po @@ -283,7 +283,7 @@ msgid "No such notebook dir: '%r'" msgstr "没有找到路径: '%r' " #: notebook/serverapp.py:1046 -msgid "Dict of Python modules to load as notebook server extensions.Entry values can be used to enable and disable the loading ofthe extensions. The extensions will be loaded in alphabetical order." +msgid "Dict of Python modules to load as notebook server extensions.Entry values can be used to enable and disable the loading of the extensions. The extensions will be loaded in alphabetical order." 
msgstr "将Python模块作为笔记本服务器扩展加载。可以使用条目值来启用和禁用扩展的加载。这些扩展将以字母顺序加载。" #: notebook/serverapp.py:1055 diff --git a/jupyter_server/kernelspecs/handlers.py b/jupyter_server/kernelspecs/handlers.py index 3ac8506a31..c7cb141459 100644 --- a/jupyter_server/kernelspecs/handlers.py +++ b/jupyter_server/kernelspecs/handlers.py @@ -1,6 +1,10 @@ +"""Kernelspecs API Handlers.""" +import mimetypes + +from jupyter_core.utils import ensure_async from tornado import web -from jupyter_server.auth import authorized +from jupyter_server.auth.decorator import authorized from ..base.handlers import JupyterHandler from ..services.kernelspecs.handlers import kernel_name_regex @@ -9,29 +13,55 @@ class KernelSpecResourceHandler(web.StaticFileHandler, JupyterHandler): - SUPPORTED_METHODS = ("GET", "HEAD") + """A Kernelspec resource handler.""" + + SUPPORTED_METHODS = ("GET", "HEAD") # type:ignore[assignment] auth_resource = AUTH_RESOURCE def initialize(self): + """Initialize a kernelspec resource handler.""" web.StaticFileHandler.initialize(self, path="") @web.authenticated @authorized - def get(self, kernel_name, path, include_body=True): + async def get(self, kernel_name, path, include_body=True): + """Get a kernelspec resource.""" ksm = self.kernel_spec_manager if path.lower().endswith(".png"): self.set_header("Cache-Control", f"max-age={60*60*24*30}") + ksm = self.kernel_spec_manager + if hasattr(ksm, "get_kernel_spec_resource"): + # If the kernel spec manager defines a method to get kernelspec resources, + # then use that instead of trying to read from disk. + kernel_spec_res = await ksm.get_kernel_spec_resource(kernel_name, path) + if kernel_spec_res is not None: + # We have to explicitly specify the `absolute_path` attribute so that + # the underlying StaticFileHandler methods can calculate an etag. 
+ self.absolute_path = path + mimetype: str = mimetypes.guess_type(path)[0] or "text/plain" + self.set_header("Content-Type", mimetype) + self.finish(kernel_spec_res) + return None + else: + self.log.warning( + "Kernelspec resource '{}' for '{}' not found. Kernel spec manager may" + " not support resource serving. Falling back to reading from disk".format( + path, kernel_name + ) + ) try: - self.root = ksm.get_kernel_spec(kernel_name).resource_dir + kspec = await ensure_async(ksm.get_kernel_spec(kernel_name)) + self.root = kspec.resource_dir except KeyError as e: raise web.HTTPError(404, "Kernel spec %s not found" % kernel_name) from e self.log.debug("Serving kernel resource from: %s", self.root) - return web.StaticFileHandler.get(self, path, include_body=include_body) + return await web.StaticFileHandler.get(self, path, include_body=include_body) @web.authenticated @authorized - def head(self, kernel_name, path): - return self.get(kernel_name, path, include_body=False) + async def head(self, kernel_name, path): + """Get the head info for a kernel resource.""" + return await ensure_async(self.get(kernel_name, path, include_body=False)) default_handlers = [ diff --git a/jupyter_server/log.py b/jupyter_server/log.py index d23799456d..705eaaf44c 100644 --- a/jupyter_server/log.py +++ b/jupyter_server/log.py @@ -1,15 +1,44 @@ +"""Log utilities.""" # ----------------------------------------------------------------------------- # Copyright (c) Jupyter Development Team # # Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. +# the file LICENSE, distributed as part of this software. 
# ----------------------------------------------------------------------------- import json +from urllib.parse import urlparse, urlunparse from tornado.log import access_log +from .auth import User from .prometheus.log_functions import prometheus_log_method +# url params to be scrubbed if seen +# any url param that *contains* one of these +# will be scrubbed from logs +_SCRUB_PARAM_KEYS = {"token", "auth", "key", "code", "state", "xsrf"} + + +def _scrub_uri(uri: str) -> str: + """scrub auth info from uri""" + parsed = urlparse(uri) + if parsed.query: + # check for potentially sensitive url params + # use manual list + split rather than parsing + # to minimally perturb original + parts = parsed.query.split("&") + changed = False + for i, s in enumerate(parts): + key, sep, value = s.partition("=") + for substring in _SCRUB_PARAM_KEYS: + if substring in key: + parts[i] = f"{key}{sep}[secret]" + changed = True + if changed: + parsed = parsed._replace(query="&".join(parts)) + return urlunparse(parsed) + return uri + def log_request(handler): """log a bit more information about each request than tornado's default @@ -37,17 +66,27 @@ def log_request(handler): log_method = logger.error request_time = 1000.0 * handler.request.request_time() - ns = dict( - status=status, - method=request.method, - ip=request.remote_ip, - uri=request.uri, - request_time=request_time, - ) - msg = "{status} {method} {uri} ({ip}) {request_time:.2f}ms" + ns = { + "status": status, + "method": request.method, + "ip": request.remote_ip, + "uri": _scrub_uri(request.uri), + "request_time": request_time, + } + # log username + # make sure we don't break anything + # in case mixins cause current_user to not be a User somehow + try: + user = handler.current_user + except Exception: + user = None + username = (user.username if isinstance(user, User) else "unknown") if user else "" + ns["username"] = username + + msg = "{status} {method} {uri} ({username}@{ip}) {request_time:.2f}ms" if status >= 400: - # 
log bad referers - ns["referer"] = request.headers.get("Referer", "None") + # log bad referrers + ns["referer"] = _scrub_uri(request.headers.get("Referer", "None")) msg = msg + " referer={referer}" if status >= 500 and status != 502: # Log a subset of the headers if it caused an error. diff --git a/jupyter_server/nbconvert/handlers.py b/jupyter_server/nbconvert/handlers.py index c5e3840699..b7a39d0c8b 100644 --- a/jupyter_server/nbconvert/handlers.py +++ b/jupyter_server/nbconvert/handlers.py @@ -7,12 +7,12 @@ import zipfile from anyio.to_thread import run_sync +from jupyter_core.utils import ensure_async from nbformat import from_dict from tornado import web from tornado.log import app_log -from jupyter_server.auth import authorized -from jupyter_server.utils import ensure_async +from jupyter_server.auth.decorator import authorized from ..base.handlers import FilesRedirectHandler, JupyterHandler, path_regex @@ -27,6 +27,7 @@ def find_resource_files(output_files_dir): + """Find the resource files in a directory.""" files = [] for dirpath, _, filenames in os.walk(output_files_dir): files.extend([os.path.join(dirpath, f) for f in filenames]) @@ -85,13 +86,15 @@ def get_exporter(format, **kwargs): class NbconvertFileHandler(JupyterHandler): + """An nbconvert file handler.""" auth_resource = AUTH_RESOURCE - SUPPORTED_METHODS = ("GET",) + SUPPORTED_METHODS = ("GET",) # type:ignore[assignment] @web.authenticated @authorized async def get(self, format, path): + """Get a notebook file in a desired format.""" self.check_xsrf_cookie() exporter = get_exporter(format, config=self.config, log=self.log) @@ -132,11 +135,11 @@ async def get(self, format, path): lambda: exporter.from_notebook_node(nb, resources=resource_dict) ) except Exception as e: - self.log.exception("nbconvert failed: %s", e) + self.log.exception("nbconvert failed: %r", e) raise web.HTTPError(500, "nbconvert failed: %s" % e) from e if respond_zip(self, name, output, resources): - return + return None # Force 
download if requested if self.get_argument("download", "false").lower() == "true": @@ -152,16 +155,19 @@ async def get(self, format, path): class NbconvertPostHandler(JupyterHandler): + """An nbconvert post handler.""" - SUPPORTED_METHODS = ("POST",) + SUPPORTED_METHODS = ("POST",) # type:ignore[assignment] auth_resource = AUTH_RESOURCE @web.authenticated @authorized async def post(self, format): + """Convert a notebook file to a desired format.""" exporter = get_exporter(format, config=self.config) model = self.get_json_body() + assert model is not None name = model.get("name", "notebook.ipynb") nbnode = from_dict(model["content"]) diff --git a/jupyter_server/prometheus/log_functions.py b/jupyter_server/prometheus/log_functions.py index 4f0d497b6c..ac4bd620c1 100644 --- a/jupyter_server/prometheus/log_functions.py +++ b/jupyter_server/prometheus/log_functions.py @@ -1,4 +1,5 @@ -from .metrics import HTTP_REQUEST_DURATION_SECONDS +"""Log functions for prometheus""" +from .metrics import HTTP_REQUEST_DURATION_SECONDS # type:ignore[unused-ignore] def prometheus_log_method(handler): diff --git a/jupyter_server/prometheus/metrics.py b/jupyter_server/prometheus/metrics.py index ae98043c3e..1a02f86209 100644 --- a/jupyter_server/prometheus/metrics.py +++ b/jupyter_server/prometheus/metrics.py @@ -16,7 +16,6 @@ ) except ImportError: - from prometheus_client import Gauge, Histogram HTTP_REQUEST_DURATION_SECONDS = Histogram( @@ -35,3 +34,10 @@ "counter for how many kernels are running labeled by type", ["type"], ) + + +__all__ = [ + "HTTP_REQUEST_DURATION_SECONDS", + "TERMINAL_CURRENTLY_RUNNING_TOTAL", + "KERNEL_CURRENTLY_RUNNING_TOTAL", +] diff --git a/.gitmodules b/jupyter_server/py.typed similarity index 100% rename from .gitmodules rename to jupyter_server/py.typed diff --git a/jupyter_server/pytest_plugin.py b/jupyter_server/pytest_plugin.py index 7b35795c63..f77448f866 100644 --- a/jupyter_server/pytest_plugin.py +++ b/jupyter_server/pytest_plugin.py @@ -1,409 +1,15 
@@ +"""Pytest Fixtures exported by Jupyter Server.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -import io import json -import logging -import os -import shutil -import sys -import urllib.parse -from binascii import hexlify +from pathlib import Path -import jupyter_core.paths -import nbformat import pytest -import tornado -from tornado.escape import url_escape -from traitlets.config import Config -from jupyter_server.extension import serverextension -from jupyter_server.serverapp import ServerApp -from jupyter_server.services.contents.filemanager import FileContentsManager -from jupyter_server.services.contents.largefilemanager import LargeFileManager -from jupyter_server.utils import url_path_join - -# List of dependencies needed for this plugin. -pytest_plugins = [ - "pytest_tornasync", - # Once the chunk below moves to Jupyter Core, we'll uncomment - # This plugin and use the fixtures directly from Jupyter Core. - # "jupyter_core.pytest_plugin" -] - - -import asyncio - -if os.name == "nt" and sys.version_info >= (3, 7): - asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) - - -# ============ Move to Jupyter Core ============= - - -def mkdir(tmp_path, *parts): - path = tmp_path.joinpath(*parts) - if not path.exists(): - path.mkdir(parents=True) - return path - - -@pytest.fixture -def jp_home_dir(tmp_path): - """Provides a temporary HOME directory value.""" - return mkdir(tmp_path, "home") - - -@pytest.fixture -def jp_data_dir(tmp_path): - """Provides a temporary Jupyter data dir directory value.""" - return mkdir(tmp_path, "data") - - -@pytest.fixture -def jp_config_dir(tmp_path): - """Provides a temporary Jupyter config dir directory value.""" - return mkdir(tmp_path, "config") - - -@pytest.fixture -def jp_runtime_dir(tmp_path): - """Provides a temporary Jupyter runtime dir directory value.""" - return mkdir(tmp_path, "runtime") - - -@pytest.fixture -def 
jp_system_jupyter_path(tmp_path): - """Provides a temporary Jupyter system path value.""" - return mkdir(tmp_path, "share", "jupyter") - - -@pytest.fixture -def jp_env_jupyter_path(tmp_path): - """Provides a temporary Jupyter env system path value.""" - return mkdir(tmp_path, "env", "share", "jupyter") - - -@pytest.fixture -def jp_system_config_path(tmp_path): - """Provides a temporary Jupyter config path value.""" - return mkdir(tmp_path, "etc", "jupyter") - - -@pytest.fixture -def jp_env_config_path(tmp_path): - """Provides a temporary Jupyter env config path value.""" - return mkdir(tmp_path, "env", "etc", "jupyter") - - -@pytest.fixture -def jp_environ( - monkeypatch, - tmp_path, - jp_home_dir, - jp_data_dir, - jp_config_dir, - jp_runtime_dir, - jp_system_jupyter_path, - jp_system_config_path, - jp_env_jupyter_path, - jp_env_config_path, -): - """Configures a temporary environment based on Jupyter-specific environment variables.""" - monkeypatch.setenv("HOME", str(jp_home_dir)) - monkeypatch.setenv("PYTHONPATH", os.pathsep.join(sys.path)) - # monkeypatch.setenv("JUPYTER_NO_CONFIG", "1") - monkeypatch.setenv("JUPYTER_CONFIG_DIR", str(jp_config_dir)) - monkeypatch.setenv("JUPYTER_DATA_DIR", str(jp_data_dir)) - monkeypatch.setenv("JUPYTER_RUNTIME_DIR", str(jp_runtime_dir)) - monkeypatch.setattr(jupyter_core.paths, "SYSTEM_JUPYTER_PATH", [str(jp_system_jupyter_path)]) - monkeypatch.setattr(jupyter_core.paths, "ENV_JUPYTER_PATH", [str(jp_env_jupyter_path)]) - monkeypatch.setattr(jupyter_core.paths, "SYSTEM_CONFIG_PATH", [str(jp_system_config_path)]) - monkeypatch.setattr(jupyter_core.paths, "ENV_CONFIG_PATH", [str(jp_env_config_path)]) - - -# ================= End: Move to Jupyter core ================ - - -@pytest.fixture -def jp_server_config(): - """Allows tests to setup their specific configuration values.""" - return {} - - -@pytest.fixture -def jp_root_dir(tmp_path): - """Provides a temporary Jupyter root directory value.""" - return mkdir(tmp_path, 
"root_dir") - - -@pytest.fixture -def jp_template_dir(tmp_path): - """Provides a temporary Jupyter templates directory value.""" - return mkdir(tmp_path, "templates") - - -@pytest.fixture -def jp_argv(): - """Allows tests to setup specific argv values.""" - return [] - - -@pytest.fixture -def jp_extension_environ(jp_env_config_path, monkeypatch): - """Monkeypatch a Jupyter Extension's config path into each test's environment variable""" - monkeypatch.setattr(serverextension, "ENV_CONFIG_PATH", [str(jp_env_config_path)]) - - -@pytest.fixture -def jp_http_port(http_server_port): - """Returns the port value from the http_server_port fixture.""" - return http_server_port[-1] - - -@pytest.fixture -def jp_nbconvert_templates(jp_data_dir): - """Setups up a temporary directory consisting of the nbconvert templates.""" - - # Get path to nbconvert template directory *before* - # monkeypatching the paths env variable via the jp_environ fixture. - possible_paths = jupyter_core.paths.jupyter_path("nbconvert", "templates") - nbconvert_path = None - for path in possible_paths: - if os.path.exists(path): - nbconvert_path = path - break - - nbconvert_target = jp_data_dir / "nbconvert" / "templates" - - # copy nbconvert templates to new tmp data_dir. - if nbconvert_path: - shutil.copytree(nbconvert_path, str(nbconvert_target)) - - -@pytest.fixture -def jp_logging_stream(): - """StringIO stream intended to be used by the core - Jupyter ServerApp logger's default StreamHandler. This - helps avoid collision with stdout which is hijacked - by Pytest. - """ - logging_stream = io.StringIO() - yield logging_stream - output = logging_stream.getvalue() - # If output exists, print it. 
- if output: - print(output) - return output - - -@pytest.fixture(scope="function") -def jp_configurable_serverapp( - jp_nbconvert_templates, # this fixture must preceed jp_environ - jp_environ, - jp_server_config, - jp_argv, - jp_http_port, - jp_base_url, - tmp_path, - jp_root_dir, - io_loop, - jp_logging_stream, -): - """Starts a Jupyter Server instance based on - the provided configuration values. - - The fixture is a factory; it can be called like - a function inside a unit test. Here's a basic - example of how use this fixture: - - .. code-block:: python - - def my_test(jp_configurable_serverapp): - - app = jp_configurable_serverapp(...) - ... - """ - ServerApp.clear_instance() - - def _configurable_serverapp( - config=jp_server_config, - base_url=jp_base_url, - argv=jp_argv, - environ=jp_environ, - http_port=jp_http_port, - tmp_path=tmp_path, - root_dir=jp_root_dir, - **kwargs, - ): - c = Config(config) - c.NotebookNotary.db_file = ":memory:" - token = hexlify(os.urandom(4)).decode("ascii") - app = ServerApp.instance( - # Set the log level to debug for testing purposes - log_level="DEBUG", - port=http_port, - port_retries=0, - open_browser=False, - root_dir=str(root_dir), - base_url=base_url, - config=c, - allow_root=True, - token=token, - **kwargs, - ) - - app.init_signal = lambda: None - app.log.propagate = True - app.log.handlers = [] - # Initialize app without httpserver - app.initialize(argv=argv, new_httpserver=False) - # Reroute all logging StreamHandlers away from stdin/stdout since pytest hijacks - # these streams and closes them at unfortunate times. 
- stream_handlers = [h for h in app.log.handlers if isinstance(h, logging.StreamHandler)] - for handler in stream_handlers: - handler.setStream(jp_logging_stream) - app.log.propagate = True - app.log.handlers = [] - # Start app without ioloop - app.start_app() - return app - - return _configurable_serverapp - - -@pytest.fixture -def jp_ensure_app_fixture(request): - """Ensures that the 'app' fixture used by pytest-tornasync - is set to `jp_web_app`, the Tornado Web Application returned - by the ServerApp in Jupyter Server, provided by the jp_web_app - fixture in this module. - - Note, this hardcodes the `app_fixture` option from - pytest-tornasync to `jp_web_app`. If this value is configured - to something other than the default, it will raise an exception. - """ - app_option = request.config.getoption("app_fixture") - if app_option not in ["app", "jp_web_app"]: - raise Exception( - "jp_serverapp requires the `app-fixture` option " - "to be set to 'jp_web_app`. Try rerunning the " - "current tests with the option `--app-fixture " - "jp_web_app`." - ) - elif app_option == "app": - # Manually set the app_fixture to `jp_web_app` if it's - # not set already. 
- request.config.option.app_fixture = "jp_web_app" - - -@pytest.fixture(scope="function") -def jp_serverapp(jp_ensure_app_fixture, jp_server_config, jp_argv, jp_configurable_serverapp): - """Starts a Jupyter Server instance based on the established configuration values.""" - app = jp_configurable_serverapp(config=jp_server_config, argv=jp_argv) - yield app - app.remove_server_info_file() - app.remove_browser_open_files() - - -@pytest.fixture -def jp_web_app(jp_serverapp): - """app fixture is needed by pytest_tornasync plugin""" - return jp_serverapp.web_app - - -@pytest.fixture -def jp_auth_header(jp_serverapp): - """Configures an authorization header using the token from the serverapp fixture.""" - return {"Authorization": f"token {jp_serverapp.token}"} - - -@pytest.fixture -def jp_base_url(): - """Returns the base url to use for the test.""" - return "/a%40b/" - - -@pytest.fixture -def jp_fetch(jp_serverapp, http_server_client, jp_auth_header, jp_base_url): - """Sends an (asynchronous) HTTP request to a test server. - - The fixture is a factory; it can be called like - a function inside a unit test. Here's a basic - example of how use this fixture: - - .. code-block:: python - - async def my_test(jp_fetch): - - response = await jp_fetch("api", "spec.yaml") - ... - """ - - def client_fetch(*parts, headers=None, params=None, **kwargs): - if not headers: - headers = {} - if not params: - params = {} - # Handle URL strings - path_url = url_escape(url_path_join(*parts), plus=False) - base_path_url = url_path_join(jp_base_url, path_url) - params_url = urllib.parse.urlencode(params) - url = base_path_url + "?" + params_url - # Add auth keys to header - headers.update(jp_auth_header) - # Make request. - return http_server_client.fetch(url, headers=headers, request_timeout=20, **kwargs) - - return client_fetch - - -@pytest.fixture -def jp_ws_fetch(jp_serverapp, http_server_client, jp_auth_header, jp_http_port, jp_base_url): - """Sends a websocket request to a test server. 
- - The fixture is a factory; it can be called like - a function inside a unit test. Here's a basic - example of how use this fixture: - - .. code-block:: python - - async def my_test(jp_fetch, jp_ws_fetch): - # Start a kernel - r = await jp_fetch( - 'api', 'kernels', - method='POST', - body=json.dumps({ - 'name': "python3" - }) - ) - kid = json.loads(r.body.decode())['id'] - - # Open a websocket connection. - ws = await jp_ws_fetch( - 'api', 'kernels', kid, 'channels' - ) - ... - """ - - def client_fetch(*parts, headers=None, params=None, **kwargs): - if not headers: - headers = {} - if not params: - params = {} - # Handle URL strings - path_url = url_escape(url_path_join(*parts), plus=False) - base_path_url = url_path_join(jp_base_url, path_url) - urlparts = urllib.parse.urlparse(f"ws://localhost:{jp_http_port}") - urlparts = urlparts._replace(path=base_path_url, query=urllib.parse.urlencode(params)) - url = urlparts.geturl() - # Add auth keys to header - headers.update(jp_auth_header) - # Make request. 
- req = tornado.httpclient.HTTPRequest(url, headers=headers, connect_timeout=120) - return tornado.websocket.websocket_connect(req) - - return client_fetch +from jupyter_server.services.contents.filemanager import AsyncFileContentsManager +from jupyter_server.services.contents.largefilemanager import AsyncLargeFileManager +pytest_plugins = ["pytest_jupyter.jupyter_server"] some_resource = "The very model of a modern major general" sample_kernel_json = { @@ -412,8 +18,8 @@ def client_fetch(*parts, headers=None, params=None, **kwargs): } -@pytest.fixture -def jp_kernelspecs(jp_data_dir): +@pytest.fixture() # type:ignore[misc] +def jp_kernelspecs(jp_data_dir: Path) -> None: # noqa: PT004 """Configures some sample kernelspecs in the Jupyter data directory.""" spec_names = ["sample", "sample2", "bad"] for name in spec_names: @@ -432,79 +38,11 @@ def jp_kernelspecs(jp_data_dir): @pytest.fixture(params=[True, False]) def jp_contents_manager(request, tmp_path): - """Returns a FileContentsManager instance based on the use_atomic_writing parameter value.""" - return FileContentsManager(root_dir=str(tmp_path), use_atomic_writing=request.param) + """Returns an AsyncFileContentsManager instance based on the use_atomic_writing parameter value.""" + return AsyncFileContentsManager(root_dir=str(tmp_path), use_atomic_writing=request.param) -@pytest.fixture +@pytest.fixture() def jp_large_contents_manager(tmp_path): - """Returns a LargeFileManager instance.""" - return LargeFileManager(root_dir=str(tmp_path)) - - -@pytest.fixture -def jp_create_notebook(jp_root_dir): - """Creates a notebook in the test's home directory.""" - - def inner(nbpath): - nbpath = jp_root_dir.joinpath(nbpath) - # Check that the notebook has the correct file extension. - if nbpath.suffix != ".ipynb": - raise Exception("File extension for notebook must be .ipynb") - # If the notebook path has a parent directory, make sure it's created. 
- parent = nbpath.parent - parent.mkdir(parents=True, exist_ok=True) - # Create a notebook string and write to file. - nb = nbformat.v4.new_notebook() - nbtext = nbformat.writes(nb, version=4) - nbpath.write_text(nbtext) - - return inner - - -@pytest.fixture(autouse=True) -def jp_server_cleanup(): - yield - ServerApp.clear_instance() - - -@pytest.fixture -def jp_cleanup_subprocesses(jp_serverapp): - """Clean up subprocesses started by a Jupyter Server, i.e. kernels and terminal.""" - - async def _(): - terminal_cleanup = jp_serverapp.web_app.settings["terminal_manager"].terminate_all - kernel_cleanup = jp_serverapp.kernel_manager.shutdown_all - - async def kernel_cleanup_steps(): - # Try a graceful shutdown with a timeout - try: - await asyncio.wait_for(kernel_cleanup(), timeout=15.0) - except asyncio.TimeoutError: - # Now force a shutdown - try: - await asyncio.wait_for(kernel_cleanup(now=True), timeout=15.0) - except asyncio.TimeoutError: - print(Exception("Kernel never shutdown!")) - except Exception as e: - print(e) - - if asyncio.iscoroutinefunction(terminal_cleanup): - try: - await terminal_cleanup() - except Exception as e: - print(e) - else: - try: - await terminal_cleanup() - except Exception as e: - print(e) - if asyncio.iscoroutinefunction(kernel_cleanup): - await kernel_cleanup_steps() - else: - try: - kernel_cleanup() - except Exception as e: - print(e) - - return _ + """Returns an AsyncLargeFileManager instance.""" + return AsyncLargeFileManager(root_dir=str(tmp_path)) diff --git a/jupyter_server/serverapp.py b/jupyter_server/serverapp.py index f2c337d404..9e4a57375d 100644 --- a/jupyter_server/serverapp.py +++ b/jupyter_server/serverapp.py @@ -1,13 +1,13 @@ """A tornado based Jupyter server.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
-import binascii +from __future__ import annotations + import datetime import errno import gettext import hashlib import hmac -import inspect import ipaddress import json import logging @@ -23,46 +23,28 @@ import sys import threading import time +import typing as t import urllib import warnings -import webbrowser from base64 import encodebytes +from pathlib import Path -try: - import resource -except ImportError: - # Windows - resource = None - -from jinja2 import Environment, FileSystemLoader -from jupyter_core.paths import secure_write - -from jupyter_server.transutils import _i18n, trans -from jupyter_server.utils import pathname2url, run_sync_in_loop, urljoin - -# the minimum viable tornado version: needs to be kept in sync with setup.py -MIN_TORNADO = (6, 1, 0) - -try: - import tornado - - assert tornado.version_info >= MIN_TORNADO -except (ImportError, AttributeError, AssertionError) as e: # pragma: no cover - raise ImportError(_i18n("The Jupyter Server requires tornado >=%s.%s.%s") % MIN_TORNADO) from e - +import jupyter_client +from jupyter_client.kernelspec import KernelSpecManager +from jupyter_client.manager import KernelManager +from jupyter_client.session import Session +from jupyter_core.application import JupyterApp, base_aliases, base_flags +from jupyter_core.paths import jupyter_runtime_dir +from jupyter_events.logger import EventLogger +from nbformat.sign import NotebookNotary from tornado import httpserver, ioloop, web from tornado.httputil import url_concat from tornado.log import LogFormatter, access_log, app_log, gen_log +from tornado.netutil import bind_sockets if not sys.platform.startswith("win"): from tornado.netutil import bind_unix_socket -from jupyter_client import KernelManager -from jupyter_client.kernelspec import KernelSpecManager -from jupyter_client.session import Session -from jupyter_core.application import JupyterApp, base_aliases, base_flags -from jupyter_core.paths import jupyter_runtime_dir -from nbformat.sign import 
NotebookNotary from traitlets import ( Any, Bool, @@ -75,6 +57,7 @@ TraitError, Type, Unicode, + Union, default, observe, validate, @@ -83,14 +66,21 @@ from traitlets.config.application import boolean_flag, catch_config_error from jupyter_server import ( + DEFAULT_EVENTS_SCHEMA_PATH, DEFAULT_JUPYTER_SERVER_PORT, DEFAULT_STATIC_FILES_PATH, DEFAULT_TEMPLATE_PATH_LIST, + JUPYTER_SERVER_EVENTS_URI, __version__, ) from jupyter_server._sysinfo import get_sys_info from jupyter_server._tz import utcnow from jupyter_server.auth.authorizer import AllowAllAuthorizer, Authorizer +from jupyter_server.auth.identity import ( + IdentityProvider, + LegacyIdentityProvider, + PasswordIdentityProvider, +) from jupyter_server.auth.login import LoginHandler from jupyter_server.auth.logout import LogoutHandler from jupyter_server.base.handlers import ( @@ -102,8 +92,9 @@ from jupyter_server.extension.config import ExtensionConfigManager from jupyter_server.extension.manager import ExtensionManager from jupyter_server.extension.serverextension import ServerExtensionApp +from jupyter_server.gateway.connections import GatewayWebSocketConnection +from jupyter_server.gateway.gateway_client import GatewayClient from jupyter_server.gateway.managers import ( - GatewayClient, GatewayKernelSpecManager, GatewayMappingKernelManager, GatewaySessionManager, @@ -114,17 +105,15 @@ AsyncFileContentsManager, FileContentsManager, ) -from jupyter_server.services.contents.largefilemanager import LargeFileManager -from jupyter_server.services.contents.manager import ( - AsyncContentsManager, - ContentsManager, -) +from jupyter_server.services.contents.largefilemanager import AsyncLargeFileManager +from jupyter_server.services.contents.manager import AsyncContentsManager, ContentsManager +from jupyter_server.services.kernels.connection.base import BaseKernelWebsocketConnection +from jupyter_server.services.kernels.connection.channels import ZMQChannelsWebsocketConnection from 
jupyter_server.services.kernels.kernelmanager import ( AsyncMappingKernelManager, MappingKernelManager, ) from jupyter_server.services.sessions.sessionmanager import SessionManager -from jupyter_server.traittypes import TypeFromClasses from jupyter_server.utils import ( check_pid, fetch, @@ -134,13 +123,34 @@ urlencode_unix_socket_path, ) -# Tolerate missing terminado package. try: - from jupyter_server.terminal import TerminalManager + import resource +except ImportError: + # Windows + resource = None # type:ignore[assignment] + +from jinja2 import Environment, FileSystemLoader +from jupyter_core.paths import secure_write +from jupyter_core.utils import ensure_async + +from jupyter_server.transutils import _i18n, trans +from jupyter_server.utils import pathname2url, urljoin + +# the minimum viable tornado version: needs to be kept in sync with setup.py +MIN_TORNADO = (6, 1, 0) + +try: + import tornado - terminado_available = True + assert tornado.version_info >= MIN_TORNADO +except (ImportError, AttributeError, AssertionError) as e: # pragma: no cover + raise ImportError(_i18n("The Jupyter Server requires tornado >=%s.%s.%s") % MIN_TORNADO) from e + +try: + import resource except ImportError: - terminado_available = False + # Windows + resource = None # type:ignore[assignment] # ----------------------------------------------------------------------------- # Module globals @@ -152,26 +162,29 @@ jupyter server password # enter a password to protect the server """ -JUPYTER_SERVICE_HANDLERS = dict( - auth=None, - api=["jupyter_server.services.api.handlers"], - config=["jupyter_server.services.config.handlers"], - contents=["jupyter_server.services.contents.handlers"], - files=["jupyter_server.files.handlers"], - kernels=["jupyter_server.services.kernels.handlers"], - kernelspecs=[ +JUPYTER_SERVICE_HANDLERS = { + "auth": None, + "api": ["jupyter_server.services.api.handlers"], + "config": ["jupyter_server.services.config.handlers"], + "contents": 
["jupyter_server.services.contents.handlers"], + "files": ["jupyter_server.files.handlers"], + "kernels": [ + "jupyter_server.services.kernels.handlers", + ], + "kernelspecs": [ "jupyter_server.kernelspecs.handlers", "jupyter_server.services.kernelspecs.handlers", ], - nbconvert=[ + "nbconvert": [ "jupyter_server.nbconvert.handlers", "jupyter_server.services.nbconvert.handlers", ], - security=["jupyter_server.services.security.handlers"], - sessions=["jupyter_server.services.sessions.handlers"], - shutdown=["jupyter_server.services.shutdown"], - view=["jupyter_server.view.handlers"], -) + "security": ["jupyter_server.services.security.handlers"], + "sessions": ["jupyter_server.services.sessions.handlers"], + "shutdown": ["jupyter_server.services.shutdown"], + "view": ["jupyter_server.view.handlers"], + "events": ["jupyter_server.services.events.handlers"], +} # Added for backwards compatibility from classic notebook server. DEFAULT_SERVER_PORT = DEFAULT_JUPYTER_SERVER_PORT @@ -181,7 +194,7 @@ # ----------------------------------------------------------------------------- -def random_ports(port, n): +def random_ports(port: int, n: int) -> t.Generator[int, None, None]: """Generate a list of n random ports near the given port. 
The first 5 ports will be sequential, and the remaining n-5 will be @@ -193,7 +206,7 @@ def random_ports(port, n): yield max(1, port + random.randint(-2 * n, 2 * n)) -def load_handlers(name): +def load_handlers(name: str) -> t.Any: """Load the (URL pattern, handler) tuples for each component.""" mod = __import__(name, fromlist=["default_handlers"]) return mod.default_handlers @@ -205,6 +218,8 @@ def load_handlers(name): class ServerWebApplication(web.Application): + """A server web application.""" + def __init__( self, jupyter_app, @@ -214,14 +229,28 @@ def __init__( session_manager, kernel_spec_manager, config_manager, + event_logger, extra_services, log, base_url, default_url, settings_overrides, jinja_env_options, + *, authorizer=None, + identity_provider=None, + kernel_websocket_connection_class=None, ): + """Initialize a server web application.""" + if identity_provider is None: + warnings.warn( + "identity_provider unspecified. Using default IdentityProvider." + " Specify an identity_provider to avoid this message.", + RuntimeWarning, + stacklevel=2, + ) + identity_provider = IdentityProvider(parent=jupyter_app) + if authorizer is None: warnings.warn( "authorizer unspecified. Using permissive AllowAllAuthorizer." 
@@ -229,7 +258,7 @@ def __init__( RuntimeWarning, stacklevel=2, ) - authorizer = AllowAllAuthorizer(jupyter_app) + authorizer = AllowAllAuthorizer(parent=jupyter_app, identity_provider=identity_provider) settings = self.init_settings( jupyter_app, @@ -238,6 +267,7 @@ def __init__( session_manager, kernel_spec_manager, config_manager, + event_logger, extra_services, log, base_url, @@ -245,6 +275,8 @@ def __init__( settings_overrides, jinja_env_options, authorizer=authorizer, + identity_provider=identity_provider, + kernel_websocket_connection_class=kernel_websocket_connection_class, ) handlers = self.init_handlers(default_services, settings) @@ -258,15 +290,19 @@ def init_settings( session_manager, kernel_spec_manager, config_manager, + event_logger, extra_services, log, base_url, default_url, settings_overrides, jinja_env_options=None, + *, authorizer=None, + identity_provider=None, + kernel_websocket_connection_class=None, ): - + """Initialize settings for the web application.""" _template_path = settings_overrides.get( "template_path", jupyter_app.template_file_path, @@ -275,7 +311,7 @@ def init_settings( _template_path = (_template_path,) template_path = [os.path.expanduser(path) for path in _template_path] - jenv_opt = {"autoescape": True} + jenv_opt: dict[str, t.Any] = {"autoescape": True} jenv_opt.update(jinja_env_options if jinja_env_options else {}) env = Environment( @@ -292,11 +328,12 @@ def init_settings( env.install_gettext_translations(nbui, newstyle=False) if sys_info["commit_source"] == "repository": - # don't cache (rely on 304) when working from default branch + # don't cache (rely on 304) when working from master version_hash = "" else: # reset the cache on server restart - version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S") + utc = datetime.timezone.utc + version_hash = datetime.datetime.now(tz=utc).strftime("%Y%m%d%H%M%S") now = utcnow() @@ -306,64 +343,63 @@ def init_settings( # collapse $HOME to ~ root_dir = "~" + 
root_dir[len(home) :] - settings = dict( + settings = { # basics - log_function=log_request, - base_url=base_url, - default_url=default_url, - template_path=template_path, - static_path=jupyter_app.static_file_path, - static_custom_path=jupyter_app.static_custom_path, - static_handler_class=FileFindHandler, - static_url_prefix=url_path_join(base_url, "/static/"), - static_handler_args={ + "log_function": log_request, + "base_url": base_url, + "default_url": default_url, + "template_path": template_path, + "static_path": jupyter_app.static_file_path, + "static_custom_path": jupyter_app.static_custom_path, + "static_handler_class": FileFindHandler, + "static_url_prefix": url_path_join(base_url, "/static/"), + "static_handler_args": { # don't cache custom.js "no_cache_paths": [url_path_join(base_url, "static", "custom")], }, - version_hash=version_hash, - # kernel message protocol over websoclet - kernel_ws_protocol=jupyter_app.kernel_ws_protocol, + "version_hash": version_hash, + # kernel message protocol over websocket + "kernel_ws_protocol": jupyter_app.kernel_ws_protocol, # rate limits - limit_rate=jupyter_app.limit_rate, - iopub_msg_rate_limit=jupyter_app.iopub_msg_rate_limit, - iopub_data_rate_limit=jupyter_app.iopub_data_rate_limit, - rate_limit_window=jupyter_app.rate_limit_window, + "limit_rate": jupyter_app.limit_rate, + "iopub_msg_rate_limit": jupyter_app.iopub_msg_rate_limit, + "iopub_data_rate_limit": jupyter_app.iopub_data_rate_limit, + "rate_limit_window": jupyter_app.rate_limit_window, # authentication - cookie_secret=jupyter_app.cookie_secret, - login_url=url_path_join(base_url, "/login"), - login_handler_class=jupyter_app.login_handler_class, - logout_handler_class=jupyter_app.logout_handler_class, - password=jupyter_app.password, - xsrf_cookies=True, - disable_check_xsrf=jupyter_app.disable_check_xsrf, - allow_remote_access=jupyter_app.allow_remote_access, - local_hostnames=jupyter_app.local_hostnames, - 
authenticate_prometheus=jupyter_app.authenticate_prometheus, + "cookie_secret": jupyter_app.cookie_secret, + "login_url": url_path_join(base_url, "/login"), + "xsrf_cookies": True, + "disable_check_xsrf": jupyter_app.disable_check_xsrf, + "allow_remote_access": jupyter_app.allow_remote_access, + "local_hostnames": jupyter_app.local_hostnames, + "authenticate_prometheus": jupyter_app.authenticate_prometheus, # managers - kernel_manager=kernel_manager, - contents_manager=contents_manager, - session_manager=session_manager, - kernel_spec_manager=kernel_spec_manager, - config_manager=config_manager, - authorizer=authorizer, + "kernel_manager": kernel_manager, + "contents_manager": contents_manager, + "session_manager": session_manager, + "kernel_spec_manager": kernel_spec_manager, + "config_manager": config_manager, + "authorizer": authorizer, + "identity_provider": identity_provider, + "event_logger": event_logger, + "kernel_websocket_connection_class": kernel_websocket_connection_class, # handlers - extra_services=extra_services, + "extra_services": extra_services, # Jupyter stuff - started=now, + "started": now, # place for extensions to register activity # so that they can prevent idle-shutdown - last_activity_times={}, - jinja_template_vars=jupyter_app.jinja_template_vars, - websocket_url=jupyter_app.websocket_url, - shutdown_button=jupyter_app.quit_button, - config=jupyter_app.config, - config_dir=jupyter_app.config_dir, - allow_password_change=jupyter_app.allow_password_change, - server_root_dir=root_dir, - jinja2_env=env, - terminals_available=terminado_available and jupyter_app.terminals_enabled, - serverapp=jupyter_app, - ) + "last_activity_times": {}, + "jinja_template_vars": jupyter_app.jinja_template_vars, + "websocket_url": jupyter_app.websocket_url, + "shutdown_button": jupyter_app.quit_button, + "config": jupyter_app.config, + "config_dir": jupyter_app.config_dir, + "allow_password_change": jupyter_app.allow_password_change, + "server_root_dir": 
root_dir, + "jinja2_env": env, + "serverapp": jupyter_app, + } # allow custom overrides for the tornado web app. settings.update(settings_overrides) @@ -381,11 +417,6 @@ def init_handlers(self, default_services, settings): for service in settings["extra_services"]: handlers.extend(load_handlers(service)) - # Add auth services. - if "auth" in default_services: - handlers.extend([(r"/login", settings["login_handler_class"])]) - handlers.extend([(r"/logout", settings["logout_handler_class"])]) - # Load default services. Raise exception if service not # found in JUPYTER_SERVICE_HANLDERS. for service in default_services: @@ -395,26 +426,18 @@ def init_handlers(self, default_services, settings): for loc in locations: handlers.extend(load_handlers(loc)) else: - raise Exception( - "{} is not recognized as a jupyter_server " + msg = ( + f"{service} is not recognized as a jupyter_server " "service. If this is a custom service, " "try adding it to the " - "`extra_services` list.".format(service) + "`extra_services` list." ) + raise Exception(msg) # Add extra handlers from contents manager. handlers.extend(settings["contents_manager"].get_extra_handlers()) - - # If gateway mode is enabled, replace appropriate handlers to perform redirection - if GatewayClient.instance().gateway_enabled: - # for each handler required for gateway, locate its pattern - # in the current list and replace that entry... 
- gateway_handlers = load_handlers("jupyter_server.gateway.handlers") - for _, gwh in enumerate(gateway_handlers): - for j, h in enumerate(handlers): - if gwh[0] == h[0]: - handlers[j] = (gwh[0], gwh[1]) - break + # And from identity provider + handlers.extend(settings["identity_provider"].get_handlers()) # register base handlers last handlers.extend(load_handlers("jupyter_server.base.handlers")) @@ -438,7 +461,7 @@ def init_handlers(self, default_services, settings): new_handlers = [] for handler in handlers: pattern = url_path_join(settings["base_url"], handler[0]) - new_handler = tuple([pattern] + list(handler[1:])) + new_handler = (pattern, *list(handler[1:])) new_handlers.append(new_handler) # add 404 on the end, which will catch everything that falls through new_handlers.append((r"(.*)", Template404)) @@ -454,14 +477,12 @@ def last_activity(self): self.settings["started"], self.settings["kernel_manager"].last_kernel_activity, ] - try: - sources.append(self.settings["api_last_activity"]) - except KeyError: - pass - try: - sources.append(self.settings["terminal_last_activity"]) - except KeyError: - pass + # Any setting that ends with a key that ends with `_last_activity` is + # counted here. This provides a hook for extensions to add a last activity + # setting to the server. + sources.extend( + [val for key, val in self.settings.items() if key.endswith("_last_activity")] + ) sources.extend(self.settings["last_activity_times"].values()) return max(sources) @@ -473,12 +494,14 @@ class JupyterPasswordApp(JupyterApp): and removes the need for token-based authentication. 
""" - description = __doc__ + description: str = __doc__ def _config_file_default(self): + """the default config file.""" return os.path.join(self.config_dir, "jupyter_server_config.json") def start(self): + """Start the password app.""" from jupyter_server.auth.security import set_password set_password(config_file=self.config_file) @@ -500,11 +523,29 @@ def shutdown_server(server_info, timeout=5, log=None): url = server_info["url"] pid = server_info["pid"] + try: + shutdown_url = urljoin(url, "api/shutdown") + if log: + log.debug("POST request to %s", shutdown_url) + fetch( + shutdown_url, + method="POST", + body=b"", + headers={"Authorization": "token " + server_info["token"]}, + ) + except Exception as ex: + if not str(ex) == "Unknown URL scheme.": + raise ex + if log: + log.debug("Was not a HTTP scheme. Treating as socket instead.") + log.debug("POST request to %s", url) + fetch( + url, + method="POST", + body=b"", + headers={"Authorization": "token " + server_info["token"]}, + ) - if log: - log.debug("POST request to %sapi/shutdown", url) - - fetch(url, method="POST", headers={"Authorization": "token " + server_info["token"]}) # Poll to see if it shut down. 
for _ in range(timeout * 10): if not check_pid(pid): @@ -535,9 +576,10 @@ def shutdown_server(server_info, timeout=5, log=None): class JupyterServerStopApp(JupyterApp): + """An application to stop a Jupyter server.""" - version = __version__ - description = "Stop currently running Jupyter server for a given port" + version: str = __version__ + description: str = "Stop currently running Jupyter server for a given port" port = Integer( DEFAULT_JUPYTER_SERVER_PORT, @@ -548,6 +590,7 @@ class JupyterServerStopApp(JupyterApp): sock = Unicode("", config=True, help="UNIX socket of the server to be killed.") def parse_command_line(self, argv=None): + """Parse command line options.""" super().parse_command_line(argv) if self.extra_args: try: @@ -557,21 +600,26 @@ def parse_command_line(self, argv=None): self.sock = self.extra_args[0] def shutdown_server(self, server): + """Shut down a server.""" return shutdown_server(server, log=self.log) def _shutdown_or_exit(self, target_endpoint, server): - print("Shutting down server on %s..." % target_endpoint) + """Handle a shutdown.""" + self.log.info("Shutting down server on %s..." 
% target_endpoint) if not self.shutdown_server(server): sys.exit("Could not stop server on %s" % target_endpoint) @staticmethod def _maybe_remove_unix_socket(socket_path): + """Try to remove a socket path.""" try: os.unlink(socket_path) except OSError: pass def start(self): + """Start the server stop app.""" + info = self.log.info servers = list(list_running_servers(self.runtime_dir, log=self.log)) if not servers: self.exit("There are no running servers (per %s)" % self.runtime_dir) @@ -589,30 +637,29 @@ def start(self): self._shutdown_or_exit(port, server) return current_endpoint = self.sock or self.port - print( - f"There is currently no server running on {current_endpoint}", - file=sys.stderr, - ) - print("Ports/sockets currently in use:", file=sys.stderr) + info(f"There is currently no server running on {current_endpoint}") + info("Ports/sockets currently in use:") for server in servers: - print(" - {}".format(server.get("sock") or server["port"]), file=sys.stderr) + info(" - {}".format(server.get("sock") or server["port"])) self.exit(1) class JupyterServerListApp(JupyterApp): - version = __version__ - description = _i18n("List currently running Jupyter servers.") + """An application to list running Jupyter servers.""" - flags = dict( - jsonlist=( + version: str = __version__ + description: str = _i18n("List currently running Jupyter servers.") + + flags = { + "jsonlist": ( {"JupyterServerListApp": {"jsonlist": True}}, _i18n("Produce machine-readable JSON list output."), ), - json=( + "json": ( {"JupyterServerListApp": {"json": True}}, _i18n("Produce machine-readable JSON object on each line of output."), ), - ) + } jsonlist = Bool( False, @@ -634,6 +681,7 @@ class JupyterServerListApp(JupyterApp): ) def start(self): + """Start the server list application.""" serverinfo_list = list(list_running_servers(self.runtime_dir, log=self.log)) if self.jsonlist: print(json.dumps(serverinfo_list, indent=2)) @@ -714,18 +762,19 @@ def start(self): class 
ServerApp(JupyterApp): + """The Jupyter Server application class.""" name = "jupyter-server" - version = __version__ - description = _i18n( + version: str = __version__ + description: str = _i18n( """The Jupyter Server. This launches a Tornado-based Jupyter Server.""" ) examples = _examples - flags = Dict(flags) - aliases = Dict(aliases) + flags = Dict(flags) # type:ignore[assignment] + aliases = Dict(aliases) # type:ignore[assignment] classes = [ KernelManager, @@ -741,18 +790,31 @@ class ServerApp(JupyterApp): GatewayMappingKernelManager, GatewayKernelSpecManager, GatewaySessionManager, + GatewayWebSocketConnection, GatewayClient, Authorizer, + EventLogger, + ZMQChannelsWebsocketConnection, ] - if terminado_available: # Only necessary when terminado is available - classes.append(TerminalManager) - subcommands = dict( - list=(JupyterServerListApp, JupyterServerListApp.description.splitlines()[0]), - stop=(JupyterServerStopApp, JupyterServerStopApp.description.splitlines()[0]), - password=(JupyterPasswordApp, JupyterPasswordApp.description.splitlines()[0]), - extension=(ServerExtensionApp, ServerExtensionApp.description.splitlines()[0]), - ) + subcommands: dict[str, t.Any] = { + "list": ( + JupyterServerListApp, + JupyterServerListApp.description.splitlines()[0], + ), + "stop": ( + JupyterServerStopApp, + JupyterServerStopApp.description.splitlines()[0], + ), + "password": ( + JupyterPasswordApp, + JupyterPasswordApp.description.splitlines()[0], + ), + "extension": ( + ServerExtensionApp, + ServerExtensionApp.description.splitlines()[0], + ), + } # A list of services whose handlers will be exposed. 
# Subclasses can override this list to @@ -770,16 +832,18 @@ class ServerApp(JupyterApp): "sessions", "shutdown", "view", + "events", ) - _log_formatter_cls = LogFormatter + _log_formatter_cls = LogFormatter # type:ignore[assignment] + _stopping = Bool(False, help="Signal that we've begun stopping.") @default("log_level") - def _default_log_level(self): + def _default_log_level(self) -> int: return logging.INFO @default("log_format") - def _default_log_format(self): + def _default_log_format(self) -> str: """override default log format to include date & time""" return ( "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" @@ -848,7 +912,7 @@ def _default_log_format(self): ) @default("ip") - def _default_ip(self): + def _default_ip(self) -> str: """Return localhost if available, 127.0.0.1 otherwise. On some (horribly broken) systems, localhost cannot be bound. @@ -866,8 +930,8 @@ def _default_ip(self): return "localhost" @validate("ip") - def _validate_ip(self, proposal): - value = proposal["value"] + def _validate_ip(self, proposal: t.Any) -> str: + value = t.cast(str, proposal["value"]) if value == "*": value = "" return value @@ -898,7 +962,7 @@ def _validate_ip(self, proposal): ) @default("port") - def port_default(self): + def _port_default(self) -> int: return int(os.getenv(self.port_env, self.port_default_value)) port_retries_env = "JUPYTER_PORT_RETRIES" @@ -913,7 +977,7 @@ def port_default(self): ) @default("port_retries") - def port_retries_default(self): + def _port_retries_default(self) -> int: return int(os.getenv(self.port_retries_env, self.port_retries_default_value)) sock = Unicode("", config=True, help="The UNIX socket the Jupyter server will listen on.") @@ -925,7 +989,7 @@ def port_retries_default(self): ) @validate("sock_mode") - def _validate_sock_mode(self, proposal): + def _validate_sock_mode(self, proposal: t.Any) -> t.Any: value = proposal["value"] try: converted_value = int(value.encode(), 8) @@ -938,12 
+1002,14 @@ def _validate_sock_mode(self, proposal): converted_value <= 2**12, ) ) - except ValueError: - raise TraitError('invalid --sock-mode value: %s, please specify as e.g. "0600"' % value) - except AssertionError: + except ValueError as e: + raise TraitError( + 'invalid --sock-mode value: %s, please specify as e.g. "0600"' % value + ) from e + except AssertionError as e: raise TraitError( "invalid --sock-mode value: %s, must have u+rw (0600) at a minimum" % value - ) + ) from e return value certfile = Unicode( @@ -971,7 +1037,7 @@ def _validate_sock_mode(self, proposal): ) @default("cookie_secret_file") - def _default_cookie_secret_file(self): + def _default_cookie_secret_file(self) -> str: return os.path.join(self.runtime_dir, "jupyter_cookie_secret") cookie_secret = Bytes( @@ -987,7 +1053,7 @@ def _default_cookie_secret_file(self): ) @default("cookie_secret") - def _default_cookie_secret(self): + def _default_cookie_secret(self) -> bytes: if os.path.exists(self.cookie_secret_file): with open(self.cookie_secret_file, "rb") as f: key = f.read() @@ -998,7 +1064,7 @@ def _default_cookie_secret(self): h.update(self.password.encode()) return h.digest() - def _write_cookie_secret_file(self, secret): + def _write_cookie_secret_file(self, secret: bytes) -> None: """write my secret to my secret_file""" self.log.info(_i18n("Writing Jupyter server cookie secret to %s"), self.cookie_secret_file) try: @@ -1011,40 +1077,24 @@ def _write_cookie_secret_file(self, secret): e, ) - token = Unicode( - "", - help=_i18n( - """Token used for authenticating first-time connections to the server. - - The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set directly - with the JUPYTER_TOKEN environment variable. - - When no password is enabled, - the default is to generate a new, random token. + _token_set = False - Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED. 
- """ - ), - ).tag(config=True) + token = Unicode("", help=_i18n("""DEPRECATED. Use IdentityProvider.token""")).tag( + config=True + ) - _token_generated = True + @observe("token") + def _deprecated_token(self, change: t.Any) -> None: + self._warn_deprecated_config(change, "IdentityProvider") @default("token") - def _token_default(self): - if os.getenv("JUPYTER_TOKEN"): - self._token_generated = False - return os.getenv("JUPYTER_TOKEN") - if os.getenv("JUPYTER_TOKEN_FILE"): - self._token_generated = False - with open(os.getenv("JUPYTER_TOKEN_FILE")) as token_file: - return token_file.read() - if self.password: - # no token if password is enabled - self._token_generated = False - return "" - else: - self._token_generated = True - return binascii.hexlify(os.urandom(24)).decode("ascii") + def _deprecated_token_access(self) -> str: + warnings.warn( + "ServerApp.token config is deprecated in jupyter-server 2.0. Use IdentityProvider.token", + DeprecationWarning, + stacklevel=3, + ) + return self.identity_provider.token min_open_files_limit = Integer( config=True, @@ -1058,10 +1108,10 @@ def _token_default(self): ) @default("min_open_files_limit") - def _default_min_open_files_limit(self): + def _default_min_open_files_limit(self) -> t.Optional[int]: if resource is None: # Ignoring min_open_files_limit because the limit cannot be adjusted (for example, on Windows) - return None + return None # type:ignore[unreachable] soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) @@ -1099,55 +1149,59 @@ def _default_min_open_files_limit(self): """, ) - @observe("token") - def _token_changed(self, change): - self._token_generated = False - password = Unicode( "", config=True, - help="""Hashed password to use for web authentication. - - To generate, type in a python/IPython shell: - - from jupyter_server.auth import passwd; passwd() - - The string should be of the form type:salt:hashed-password. - """, + help="""DEPRECATED in 2.0. 
Use PasswordIdentityProvider.hashed_password""", ) password_required = Bool( False, config=True, - help="""Forces users to use a password for the Jupyter server. - This is useful in a multi user environment, for instance when - everybody in the LAN can access each other's machine through ssh. - - In such a case, serving on localhost is not secure since - any user can connect to the Jupyter server via ssh. - - """, + help="""DEPRECATED in 2.0. Use PasswordIdentityProvider.password_required""", ) allow_password_change = Bool( True, config=True, - help="""Allow password to be changed at login for the Jupyter server. + help="""DEPRECATED in 2.0. Use PasswordIdentityProvider.allow_password_change""", + ) + + def _warn_deprecated_config( + self, change: t.Any, clsname: str, new_name: t.Optional[str] = None + ) -> None: + """Warn on deprecated config.""" + if new_name is None: + new_name = change.name + if clsname not in self.config or new_name not in self.config[clsname]: + # Deprecated config used, new config not used. + # Use deprecated config, warn about new name. + self.log.warning( + f"ServerApp.{change.name} config is deprecated in 2.0. Use {clsname}.{new_name}." + ) + self.config[clsname][new_name] = change.new + # Deprecated config used, new config also used. + # Warn only if the values differ. + # If the values are the same, assume intentional backward-compatible config. + elif self.config[clsname][new_name] != change.new: + self.log.warning( + f"Ignoring deprecated ServerApp.{change.name} config. Using {clsname}.{new_name}." + ) - While logging in with a token, the Jupyter server UI will give the opportunity to - the user to enter a new password at the same time that will replace - the token login mechanism. + @observe("password") + def _deprecated_password(self, change: t.Any) -> None: + self._warn_deprecated_config(change, "PasswordIdentityProvider", new_name="hashed_password") - This can be set to false to prevent changing password from the UI/API. 
- """, - ) + @observe("password_required", "allow_password_change") + def _deprecated_password_config(self, change: t.Any) -> None: + self._warn_deprecated_config(change, "PasswordIdentityProvider") disable_check_xsrf = Bool( False, config=True, help="""Disable cross-site-request-forgery protection - Jupyter notebook 4.3.1 introduces protection from cross-site request forgeries, + Jupyter server includes protection from cross-site request forgeries, requiring API requests to either: - originate from pages served by this server (validated with XSRF cookie and token), or @@ -1178,7 +1232,7 @@ def _token_changed(self, change): ) @default("allow_remote_access") - def _default_allow_remote(self): + def _default_allow_remote(self) -> bool: """Disallow remote access if we're listening only on loopback addresses""" # if blank, self.ip was configured to "*" meaning bind to all interfaces, @@ -1191,10 +1245,10 @@ def _default_allow_remote(self): except ValueError: # Address is a hostname for info in socket.getaddrinfo(self.ip, self.port, 0, socket.SOCK_STREAM): - addr = info[4][0] + addr = info[4][0] # type:ignore[assignment] try: - parsed = ipaddress.ip_address(addr.split("%")[0]) + parsed = ipaddress.ip_address(addr.split("%")[0]) # type:ignore[union-attr] except ValueError: self.log.warning("Unrecognised IP address: %r", addr) continue @@ -1202,7 +1256,9 @@ def _default_allow_remote(self): # Macs map localhost to 'fe80::1%lo0', a link local address # scoped to the loopback interface. For now, we'll assume that # any scoped link-local address is effectively local. - if not (parsed.is_loopback or (("%" in addr) and parsed.is_link_local)): + if not ( + parsed.is_loopback or (("%" in addr) and parsed.is_link_local) # type:ignore[operator] + ): return True return False else: @@ -1300,24 +1356,24 @@ def _default_allow_remote(self): ), ) terminado_settings = Dict( + Union([List(), Unicode()]), config=True, help=_i18n('Supply overrides for terminado. 
Currently only supports "shell_command".'), ) cookie_options = Dict( config=True, - help=_i18n( - "Extra keyword arguments to pass to `set_secure_cookie`." - " See tornado's set_secure_cookie docs for details." - ), + help=_i18n("DEPRECATED. Use IdentityProvider.cookie_options"), ) get_secure_cookie_kwargs = Dict( config=True, - help=_i18n( - "Extra keyword arguments to pass to `get_secure_cookie`." - " See tornado's get_secure_cookie docs for details." - ), + help=_i18n("DEPRECATED. Use IdentityProvider.get_secure_cookie_kwargs"), ) + + @observe("cookie_options", "get_secure_cookie_kwargs") + def _deprecated_cookie_config(self, change: t.Any) -> None: + self._warn_deprecated_config(change, "IdentityProvider") + ssl_options = Dict( allow_none=True, config=True, @@ -1348,8 +1404,8 @@ def _default_allow_remote(self): ) @validate("base_url") - def _update_base_url(self, proposal): - value = proposal["value"] + def _update_base_url(self, proposal: t.Any) -> str: + value = t.cast(str, proposal["value"]) if not value.startswith("/"): value = "/" + value if not value.endswith("/"): @@ -1366,14 +1422,14 @@ def _update_base_url(self, proposal): ) @property - def static_file_path(self): + def static_file_path(self) -> list[str]: """return extra paths + the default location""" - return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] + return [*self.extra_static_paths, DEFAULT_STATIC_FILES_PATH] static_custom_path = List(Unicode(), help=_i18n("""Path to search for custom.js, css""")) @default("static_custom_path") - def _default_static_custom_path(self): + def _default_static_custom_path(self) -> list[str]: return [os.path.join(d, "custom") for d in (self.config_dir, DEFAULT_STATIC_FILES_PATH)] extra_template_paths = List( @@ -1387,7 +1443,7 @@ def _default_static_custom_path(self): ) @property - def template_file_path(self): + def template_file_path(self) -> list[str]: """return extra paths + the default locations""" return self.extra_template_paths + 
DEFAULT_TEMPLATE_PATH_LIST @@ -1415,51 +1471,50 @@ def template_file_path(self): help="""If True, display controls to shut down the Jupyter server, such as menu items or buttons.""", ) - # REMOVE in VERSION 2.0 - # Temporarily allow content managers to inherit from the 'notebook' - # package. We will deprecate this in the next major release. - contents_manager_class = TypeFromClasses( - default_value=LargeFileManager, - klasses=[ - "jupyter_server.services.contents.manager.ContentsManager", - "notebook.services.contents.manager.ContentsManager", - ], + contents_manager_class = Type( + default_value=AsyncLargeFileManager, + klass=ContentsManager, config=True, help=_i18n("The content manager class to use."), ) - # Throws a deprecation warning to notebook based contents managers. - @observe("contents_manager_class") - def _observe_contents_manager_class(self, change): - new = change["new"] - # If 'new' is a class, get a string representing the import - # module path. - if inspect.isclass(new): - new = new.__module__ - - if new.startswith("notebook"): - self.log.warning( - "The specified 'contents_manager_class' class inherits a manager from the " - "'notebook' package. This is not guaranteed to work in future " - "releases of Jupyter Server. Instead, consider switching the " - "manager to inherit from the 'jupyter_server' managers. " - "Jupyter Server will temporarily allow 'notebook' managers " - "until its next major release (2.x)." 
- ) - kernel_manager_class = Type( - default_value=AsyncMappingKernelManager, klass=MappingKernelManager, config=True, help=_i18n("The kernel manager class to use."), ) + @default("kernel_manager_class") + def _default_kernel_manager_class(self) -> t.Union[str, type[AsyncMappingKernelManager]]: + if self.gateway_config.gateway_enabled: + return "jupyter_server.gateway.managers.GatewayMappingKernelManager" + return AsyncMappingKernelManager + session_manager_class = Type( - default_value=SessionManager, config=True, help=_i18n("The session manager class to use."), ) + @default("session_manager_class") + def _default_session_manager_class(self) -> t.Union[str, type[SessionManager]]: + if self.gateway_config.gateway_enabled: + return "jupyter_server.gateway.managers.GatewaySessionManager" + return SessionManager + + kernel_websocket_connection_class = Type( + klass=BaseKernelWebsocketConnection, + config=True, + help=_i18n("The kernel websocket connection class to use."), + ) + + @default("kernel_websocket_connection_class") + def _default_kernel_websocket_connection_class( + self, + ) -> t.Union[str, type[ZMQChannelsWebsocketConnection]]: + if self.gateway_config.gateway_enabled: + return "jupyter_server.gateway.connections.GatewayWebSocketConnection" + return ZMQChannelsWebsocketConnection + config_manager_class = Type( default_value=ConfigManager, config=True, @@ -1469,7 +1524,6 @@ def _observe_contents_manager_class(self, change): kernel_spec_manager = Instance(KernelSpecManager, allow_none=True) kernel_spec_manager_class = Type( - default_value=KernelSpecManager, config=True, help=""" The kernel spec manager class to use. 
Should be a subclass @@ -1480,9 +1534,16 @@ def _observe_contents_manager_class(self, change): """, ) + @default("kernel_spec_manager_class") + def _default_kernel_spec_manager_class(self) -> t.Union[str, type[KernelSpecManager]]: + if self.gateway_config.gateway_enabled: + return "jupyter_server.gateway.managers.GatewayKernelSpecManager" + return KernelSpecManager + login_handler_class = Type( default_value=LoginHandler, klass=web.RequestHandler, + allow_none=True, config=True, help=_i18n("The login handler class to use."), ) @@ -1490,9 +1551,11 @@ def _observe_contents_manager_class(self, change): logout_handler_class = Type( default_value=LogoutHandler, klass=web.RequestHandler, + allow_none=True, config=True, help=_i18n("The logout handler class to use."), ) + # TODO: detect deprecated login handler config authorizer_class = Type( default_value=AllowAllAuthorizer, @@ -1501,6 +1564,13 @@ def _observe_contents_manager_class(self, change): help=_i18n("The authorizer class to use."), ) + identity_provider_class = Type( + default_value=PasswordIdentityProvider, + klass=IdentityProvider, + config=True, + help=_i18n("The identity provider class to use."), + ) + trust_xheaders = Bool( False, config=True, @@ -1512,24 +1582,34 @@ def _observe_contents_manager_class(self, change): ), ) + event_logger = Instance( + EventLogger, + allow_none=True, + help="An EventLogger for emitting structured event data from Jupyter Server and extensions.", + ) + info_file = Unicode() @default("info_file") - def _default_info_file(self): + def _default_info_file(self) -> str: info_file = "jpserver-%s.json" % os.getpid() return os.path.join(self.runtime_dir, info_file) + no_browser_open_file = Bool( + False, help="If True, do not write redirect HTML file disk, or show in messages." 
+ ) + browser_open_file = Unicode() @default("browser_open_file") - def _default_browser_open_file(self): + def _default_browser_open_file(self) -> str: basename = "jpserver-%s-open.html" % os.getpid() return os.path.join(self.runtime_dir, basename) browser_open_file_to_run = Unicode() @default("browser_open_file_to_run") - def _default_browser_open_file_to_run(self): + def _default_browser_open_file_to_run(self) -> str: basename = "jpserver-file-to-run-%s-open.html" % os.getpid() return os.path.join(self.runtime_dir, basename) @@ -1544,12 +1624,9 @@ def _default_browser_open_file_to_run(self): ) @observe("pylab") - def _update_pylab(self, change): + def _update_pylab(self, change: t.Any) -> None: """when --pylab is specified, display a warning and exit""" - if change["new"] != "warn": - backend = " %s" % change["new"] - else: - backend = "" + backend = " %s" % change["new"] if change["new"] != "warn" else "" self.log.error( _i18n("Support for specifying --pylab on the command line has been removed.") ) @@ -1563,25 +1640,46 @@ def _update_pylab(self, change): notebook_dir = Unicode(config=True, help=_i18n("DEPRECATED, use root_dir.")) @observe("notebook_dir") - def _update_notebook_dir(self, change): + def _update_notebook_dir(self, change: t.Any) -> None: if self._root_dir_set: # only use deprecated config if new config is not set return self.log.warning(_i18n("notebook_dir is deprecated, use root_dir")) self.root_dir = change["new"] + external_connection_dir = Unicode( + None, + allow_none=True, + config=True, + help=_i18n( + "The directory to look at for external kernel connection files, if allow_external_kernels is True. " + "Defaults to Jupyter runtime_dir/external_kernels. " + "Make sure that this directory is not filled with left-over connection files, " + "that could result in unnecessary kernel manager creations." 
+ ), + ) + + allow_external_kernels = Bool( + False, + config=True, + help=_i18n( + "Whether or not to allow external kernels, whose connection files are placed in external_connection_dir." + ), + ) + root_dir = Unicode(config=True, help=_i18n("The directory to use for notebooks and kernels.")) _root_dir_set = False @default("root_dir") - def _default_root_dir(self): + def _default_root_dir(self) -> str: if self.file_to_run: self._root_dir_set = True return os.path.dirname(os.path.abspath(self.file_to_run)) else: return os.getcwd() - def _normalize_dir(self, value): + def _normalize_dir(self, value: str) -> str: + """Normalize a directory.""" # Strip any trailing slashes # *except* if it's root _, path = os.path.splitdrive(value) @@ -1594,46 +1692,36 @@ def _normalize_dir(self, value): return value @validate("root_dir") - def _root_dir_validate(self, proposal): + def _root_dir_validate(self, proposal: t.Any) -> str: value = self._normalize_dir(proposal["value"]) if not os.path.isdir(value): raise TraitError(trans.gettext("No such directory: '%r'") % value) return value + @observe("root_dir") + def _root_dir_changed(self, change: t.Any) -> None: + # record that root_dir is set, + # which affects loading of deprecated notebook_dir + self._root_dir_set = True + preferred_dir = Unicode( config=True, help=trans.gettext("Preferred starting directory to use for notebooks and kernels."), ) @default("preferred_dir") - def _default_prefered_dir(self): + def _default_prefered_dir(self) -> str: return self.root_dir @validate("preferred_dir") - def _preferred_dir_validate(self, proposal): + def _preferred_dir_validate(self, proposal: t.Any) -> str: value = self._normalize_dir(proposal["value"]) if not os.path.isdir(value): raise TraitError(trans.gettext("No such preferred dir: '%r'") % value) - - # preferred_dir must be equal or a subdir of root_dir - if not value.startswith(self.root_dir): - raise TraitError( - trans.gettext("preferred_dir must be equal or a subdir of 
root_dir: '%r'") % value - ) - return value - @observe("root_dir") - def _root_dir_changed(self, change): - self._root_dir_set = True - if not self.preferred_dir.startswith(change["new"]): - self.log.warning( - trans.gettext("Value of preferred_dir updated to use value of root_dir") - ) - self.preferred_dir = change["new"] - @observe("server_extensions") - def _update_server_extensions(self, change): + def _update_server_extensions(self, change: t.Any) -> None: self.log.warning(_i18n("server_extensions is deprecated, use jpserver_extensions")) self.server_extensions = change["new"] @@ -1658,63 +1746,61 @@ def _update_server_extensions(self, change): ) kernel_ws_protocol = Unicode( - None, allow_none=True, config=True, - help=_i18n( - "Preferred kernel message protocol over websocket to use (default: None). " - "If an empty string is passed, select the legacy protocol. If None, " - "the selected protocol will depend on what the front-end supports " - "(usually the most recent protocol supported by the back-end and the " - "front-end)." - ), + help=_i18n("DEPRECATED. Use ZMQChannelsWebsocketConnection.kernel_ws_protocol"), ) + @observe("kernel_ws_protocol") + def _deprecated_kernel_ws_protocol(self, change: t.Any) -> None: + self._warn_deprecated_config(change, "ZMQChannelsWebsocketConnection") + limit_rate = Bool( - True, + allow_none=True, config=True, - help=_i18n( - "Whether to limit the rate of IOPub messages (default: True). " - "If True, use iopub_msg_rate_limit, iopub_data_rate_limit and/or rate_limit_window " - "to tune the rate." - ), + help=_i18n("DEPRECATED. 
Use ZMQChannelsWebsocketConnection.limit_rate"), ) + @observe("limit_rate") + def _deprecated_limit_rate(self, change: t.Any) -> None: + self._warn_deprecated_config(change, "ZMQChannelsWebsocketConnection") + iopub_msg_rate_limit = Float( - 1000, + allow_none=True, config=True, - help=_i18n( - """(msgs/sec) - Maximum rate at which messages can be sent on iopub before they are - limited.""" - ), + help=_i18n("DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_msg_rate_limit"), ) + @observe("iopub_msg_rate_limit") + def _deprecated_iopub_msg_rate_limit(self, change: t.Any) -> None: + self._warn_deprecated_config(change, "ZMQChannelsWebsocketConnection") + iopub_data_rate_limit = Float( - 1000000, + allow_none=True, config=True, - help=_i18n( - """(bytes/sec) - Maximum rate at which stream output can be sent on iopub before they are - limited.""" - ), + help=_i18n("DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_data_rate_limit"), ) + @observe("iopub_data_rate_limit") + def _deprecated_iopub_data_rate_limit(self, change: t.Any) -> None: + self._warn_deprecated_config(change, "ZMQChannelsWebsocketConnection") + rate_limit_window = Float( - 3, + allow_none=True, config=True, - help=_i18n( - """(sec) Time window used to - check the message and data rate limits.""" - ), + help=_i18n("DEPRECATED. Use ZMQChannelsWebsocketConnection.rate_limit_window"), ) + @observe("rate_limit_window") + def _deprecated_rate_limit_window(self, change: t.Any) -> None: + self._warn_deprecated_config(change, "ZMQChannelsWebsocketConnection") + shutdown_no_activity_timeout = Integer( 0, config=True, help=( - "Shut down the server after N seconds with no kernels or " - "terminals running and no activity. " + "Shut down the server after N seconds with no kernels" + "running and no activity. " "This can be used together with culling idle kernels " "(MappingKernelManager.cull_idle_timeout) to " "shutdown the Jupyter server when it's not in use. 
This is not " @@ -1724,7 +1810,6 @@ def _update_server_extensions(self, change): ) terminals_enabled = Bool( - True, config=True, help=_i18n( """Set to False to disable terminals. @@ -1738,14 +1823,9 @@ def _update_server_extensions(self, change): ), ) - # Since use of terminals is also a function of whether the terminado package is - # available, this variable holds the "final indication" of whether terminal functionality - # should be considered (particularly during shutdown/cleanup). It is enabled only - # once both the terminals "service" can be initialized and terminals_enabled is True. - # Note: this variable is slightly different from 'terminals_available' in the web settings - # in that this variable *could* remain false if terminado is available, yet the terminal - # service's initialization still fails. As a result, this variable holds the truth. - terminals_available = False + @default("terminals_enabled") + def _default_terminals_enabled(self) -> bool: + return True authenticate_prometheus = Bool( True, @@ -1755,6 +1835,17 @@ def _update_server_extensions(self, change): config=True, ) + static_immutable_cache = List( + Unicode(), + help=""" + Paths to set up static files as immutable. + + This allow setting up the cache control of static files as immutable. + It should be used for static file named with a hash for instance. 
+ """, + config=True, + ) + _starter_app = Instance( default_value=None, allow_none=True, @@ -1762,12 +1853,12 @@ def _update_server_extensions(self, change): ) @property - def starter_app(self): + def starter_app(self) -> t.Any: """Get the Extension that started this server.""" return self._starter_app - def parse_command_line(self, argv=None): - + def parse_command_line(self, argv: t.Optional[list[str]] = None) -> None: + """Parse the command line options.""" super().parse_command_line(argv) if self.extra_args: @@ -1787,34 +1878,62 @@ def parse_command_line(self, argv=None): c.ServerApp.file_to_run = f self.update_config(c) - def init_configurables(self): - + def init_configurables(self) -> None: + """Initialize configurables.""" # If gateway server is configured, replace appropriate managers to perform redirection. To make # this determination, instantiate the GatewayClient config singleton. self.gateway_config = GatewayClient.instance(parent=self) - if self.gateway_config.gateway_enabled: - self.kernel_manager_class = ( - "jupyter_server.gateway.managers.GatewayMappingKernelManager" + if not issubclass( + self.kernel_manager_class, + AsyncMappingKernelManager, + ): + warnings.warn( + "The synchronous MappingKernelManager class is deprecated and will not be supported in Jupyter Server 3.0", + DeprecationWarning, + stacklevel=2, ) - self.session_manager_class = "jupyter_server.gateway.managers.GatewaySessionManager" - self.kernel_spec_manager_class = ( - "jupyter_server.gateway.managers.GatewayKernelSpecManager" + + if not issubclass( + self.contents_manager_class, + AsyncContentsManager, + ): + warnings.warn( + "The synchronous ContentsManager classes are deprecated and will not be supported in Jupyter Server 3.0", + DeprecationWarning, + stacklevel=2, ) self.kernel_spec_manager = self.kernel_spec_manager_class( parent=self, ) - self.kernel_manager = self.kernel_manager_class( - parent=self, - log=self.log, - connection_dir=self.runtime_dir, - 
kernel_spec_manager=self.kernel_spec_manager, - ) + + kwargs = { + "parent": self, + "log": self.log, + "connection_dir": self.runtime_dir, + "kernel_spec_manager": self.kernel_spec_manager, + } + if jupyter_client.version_info > (8, 3, 0): # type:ignore[attr-defined] + if self.allow_external_kernels: + external_connection_dir = self.external_connection_dir + if external_connection_dir is None: + external_connection_dir = str(Path(self.runtime_dir) / "external_kernels") + kwargs["external_connection_dir"] = external_connection_dir + elif self.allow_external_kernels: + self.log.warning( + "Although allow_external_kernels=True, external kernels are not supported " + "because jupyter-client's version does not allow them (should be >8.3.0)." + ) + + self.kernel_manager = self.kernel_manager_class(**kwargs) self.contents_manager = self.contents_manager_class( parent=self, log=self.log, ) + # Trigger a default/validation here explicitly while we still support the + # deprecated trait on ServerApp (FIXME remove when deprecation finalized) + self.contents_manager.preferred_dir # noqa: B018 self.session_manager = self.session_manager_class( parent=self, log=self.log, @@ -1825,9 +1944,58 @@ def init_configurables(self): parent=self, log=self.log, ) - self.authorizer = self.authorizer_class(parent=self, log=self.log) + identity_provider_kwargs = {"parent": self, "log": self.log} + + if ( + self.login_handler_class is not LoginHandler + and self.identity_provider_class is PasswordIdentityProvider + ): + # default identity provider, non-default LoginHandler + # this indicates legacy custom LoginHandler config. + # enable LegacyIdentityProvider, which defers to the LoginHandler for pre-2.0 behavior. + self.identity_provider_class = LegacyIdentityProvider + self.log.warning( + f"Customizing authentication via ServerApp.login_handler_class={self.login_handler_class}" + " is deprecated in Jupyter Server 2.0." + " Use ServerApp.identity_provider_class." 
+ " Falling back on legacy authentication.", + ) + identity_provider_kwargs["login_handler_class"] = self.login_handler_class + if self.logout_handler_class: + identity_provider_kwargs["logout_handler_class"] = self.logout_handler_class + elif self.login_handler_class is not LoginHandler: + # non-default login handler ignored because also explicitly set identity provider + self.log.warning( + f"Ignoring deprecated config ServerApp.login_handler_class={self.login_handler_class}." + " Superseded by ServerApp.identity_provider_class={self.identity_provider_class}." + ) + self.identity_provider = self.identity_provider_class(**identity_provider_kwargs) + + if self.identity_provider_class is LegacyIdentityProvider: + # legacy config stored the password in tornado_settings + self.tornado_settings["password"] = self.identity_provider.hashed_password # type:ignore[attr-defined] + self.tornado_settings["token"] = self.identity_provider.token - def init_logging(self): + if self._token_set: + self.log.warning( + "ServerApp.token config is deprecated in jupyter-server 2.0. Use IdentityProvider.token" + ) + if self.identity_provider.token_generated: + # default behavior: generated default token + # preserve deprecated ServerApp.token config + self.identity_provider.token_generated = False + self.identity_provider.token = self.token + else: + # identity_provider didn't generate a default token, + # that means it has some config that should take higher priority than deprecated ServerApp.token + self.log.warning("Ignoring deprecated ServerApp.token config") + + self.authorizer = self.authorizer_class( + parent=self, log=self.log, identity_provider=self.identity_provider + ) + + def init_logging(self) -> None: + """Initialize logging.""" # This prevents double log messages because tornado use a root logger that # self.log is a child of. The logging module dipatches log messages to a log # and all of its ancenstors until propagate is set to False. 
@@ -1842,7 +2010,25 @@ def init_logging(self): logger.parent = self.log logger.setLevel(self.log.level) - def init_webapp(self): + def init_event_logger(self) -> None: + """Initialize the Event Bus.""" + self.event_logger = EventLogger(parent=self) + # Load the core Jupyter Server event schemas + # All event schemas must start with Jupyter Server's + # events URI, `JUPYTER_SERVER_EVENTS_URI`. + schema_ids = [ + "https://events.jupyter.org/jupyter_server/contents_service/v1", + "https://events.jupyter.org/jupyter_server/gateway_client/v1", + "https://events.jupyter.org/jupyter_server/kernel_actions/v1", + ] + for schema_id in schema_ids: + # Get the schema path from the schema ID. + rel_schema_path = schema_id.replace(JUPYTER_SERVER_EVENTS_URI + "/", "") + ".yaml" + schema_path = DEFAULT_EVENTS_SCHEMA_PATH / rel_schema_path + # Use this pathlib object to register the schema + self.event_logger.register_event_schema(schema_path) + + def init_webapp(self) -> None: """initialize tornado webapp""" self.tornado_settings["allow_origin"] = self.allow_origin self.tornado_settings["websocket_compression_options"] = self.websocket_compression_options @@ -1850,22 +2036,21 @@ def init_webapp(self): self.tornado_settings["allow_origin_pat"] = re.compile(self.allow_origin_pat) self.tornado_settings["allow_credentials"] = self.allow_credentials self.tornado_settings["autoreload"] = self.autoreload - self.tornado_settings["cookie_options"] = self.cookie_options - self.tornado_settings["get_secure_cookie_kwargs"] = self.get_secure_cookie_kwargs - self.tornado_settings["token"] = self.token + + # deprecate accessing these directly, in favor of identity_provider? 
+ self.tornado_settings["cookie_options"] = self.identity_provider.cookie_options + self.tornado_settings[ + "get_secure_cookie_kwargs" + ] = self.identity_provider.get_secure_cookie_kwargs + self.tornado_settings["token"] = self.identity_provider.token + + if self.static_immutable_cache: + self.tornado_settings["static_immutable_cache"] = self.static_immutable_cache # ensure default_url starts with base_url if not self.default_url.startswith(self.base_url): self.default_url = url_path_join(self.base_url, self.default_url) - if self.password_required and (not self.password): - self.log.critical( - _i18n("Jupyter servers are configured to only be run with a password.") - ) - self.log.critical(_i18n("Hint: run the following command to set a password")) - self.log.critical(_i18n("\t$ python -m jupyter_server.auth password")) - sys.exit(1) - # Socket options validation. if self.sock: if self.port != DEFAULT_JUPYTER_SERVER_PORT: @@ -1906,6 +2091,7 @@ def init_webapp(self): self.session_manager, self.kernel_spec_manager, self.config_manager, + self.event_logger, self.extra_services, self.log, self.base_url, @@ -1913,6 +2099,8 @@ def init_webapp(self): self.tornado_settings, self.jinja_environment_options, authorizer=self.authorizer, + identity_provider=self.identity_provider, + kernel_websocket_connection_class=self.kernel_websocket_connection_class, ) if self.certfile: self.ssl_options["certfile"] = self.certfile @@ -1923,7 +2111,7 @@ def init_webapp(self): if not self.ssl_options: # could be an empty dict or None # None indicates no SSL config - self.ssl_options = None + self.ssl_options = None # type:ignore[assignment] else: # SSL may be missing, so only import it if it's to be used import ssl @@ -1936,12 +2124,16 @@ def init_webapp(self): if self.ssl_options.get("ca_certs", False): self.ssl_options.setdefault("cert_reqs", ssl.CERT_REQUIRED) - self.login_handler_class.validate_security(self, ssl_options=self.ssl_options) + 
self.identity_provider.validate_security(self, ssl_options=self.ssl_options) - def init_resources(self): + if isinstance(self.identity_provider, LegacyIdentityProvider): + # LegacyIdentityProvider needs access to the tornado settings dict + self.identity_provider.settings = self.web_app.settings + + def init_resources(self) -> None: """initialize system resources""" if resource is None: - self.log.debug( + self.log.debug( # type:ignore[unreachable] "Ignoring min_open_files_limit because the limit cannot be adjusted (for example, on Windows)" ) return @@ -1949,17 +2141,17 @@ def init_resources(self): old_soft, old_hard = resource.getrlimit(resource.RLIMIT_NOFILE) soft = self.min_open_files_limit hard = old_hard - if old_soft < soft: + if soft is not None and old_soft < soft: if hard < soft: hard = soft self.log.debug( - "Raising open file limit: soft {}->{}; hard {}->{}".format( - old_soft, soft, old_hard, hard - ) + f"Raising open file limit: soft {old_soft}->{soft}; hard {old_hard}->{hard}" ) resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) - def _get_urlparts(self, path=None, include_token=False): + def _get_urlparts( + self, path: t.Optional[str] = None, include_token: bool = False + ) -> urllib.parse.ParseResult: """Constructs a urllib named tuple, ParseResult, with default values set by server config. The returned tuple can be manipulated using the `_replace` method. @@ -1976,30 +2168,24 @@ def _get_urlparts(self, path=None, include_token=False): else: ip = f"[{self.ip}]" if ":" in self.ip else self.ip netloc = f"{ip}:{self.port}" - if self.certfile: - scheme = "https" - else: - scheme = "http" + scheme = "https" if self.certfile else "http" if not path: path = self.default_url query = None - if include_token: - if self.token: # Don't log full token if it came from config - token = self.token if self._token_generated else "..." 
- query = urllib.parse.urlencode({"token": token}) + # Don't log full token if it came from config + if include_token and self.identity_provider.token: + token = ( + self.identity_provider.token if self.identity_provider.token_generated else "..." + ) + query = urllib.parse.urlencode({"token": token}) # Build the URL Parts to dump. urlparts = urllib.parse.ParseResult( - scheme=scheme, - netloc=netloc, - path=path, - params=None, - query=query, - fragment=None, + scheme=scheme, netloc=netloc, path=path, query=query or "", params="", fragment="" ) return urlparts @property - def public_url(self): + def public_url(self) -> str: parts = self._get_urlparts(include_token=True) # Update with custom pieces. if self.custom_display_url: @@ -2012,7 +2198,7 @@ def public_url(self): return parts.geturl() @property - def local_url(self): + def local_url(self) -> str: parts = self._get_urlparts(include_token=True) # Update with custom pieces. if not self.sock: @@ -2020,37 +2206,25 @@ def local_url(self): return parts.geturl() @property - def display_url(self): + def display_url(self) -> str: """Human readable string with URLs for interacting with the running Jupyter Server """ - url = self.public_url + "\n or " + self.local_url + url = self.public_url + "\n " + self.local_url return url @property - def connection_url(self): + def connection_url(self) -> str: urlparts = self._get_urlparts(path=self.base_url) return urlparts.geturl() - def init_terminals(self): - if not self.terminals_enabled: - return - - try: - from jupyter_server.terminal import initialize - - initialize( - self.web_app, - self.root_dir, - self.connection_url, - self.terminado_settings, - ) - self.terminals_available = True - except ImportError as e: - self.log.warning(_i18n("Terminals not available (error was %s)"), e) - - def init_signal(self): - if not sys.platform.startswith("win") and sys.stdin and sys.stdin.isatty(): + def init_signal(self) -> None: + """Initialize signal handlers.""" + if ( + not 
sys.platform.startswith("win") + and sys.stdin # type:ignore[truthy-bool] + and sys.stdin.isatty() + ): signal.signal(signal.SIGINT, self._handle_sigint) signal.signal(signal.SIGTERM, self._signal_stop) if hasattr(signal, "SIGUSR1"): @@ -2060,7 +2234,7 @@ def init_signal(self): # only on BSD-based systems signal.signal(signal.SIGINFO, self._signal_info) - def _handle_sigint(self, sig, frame): + def _handle_sigint(self, sig: t.Any, frame: t.Any) -> None: """SIGINT handler spawns confirmation dialog""" # register more forceful signal handler for ^C^C case signal.signal(signal.SIGINT, self._signal_stop) @@ -2070,11 +2244,11 @@ def _handle_sigint(self, sig, frame): thread.daemon = True thread.start() - def _restore_sigint_handler(self): + def _restore_sigint_handler(self) -> None: """callback for restoring original SIGINT handler""" signal.signal(signal.SIGINT, self._handle_sigint) - def _confirm_exit(self): + def _confirm_exit(self) -> None: """confirm shutdown on ^C A second ^C, or answering 'y' within 5s will cause shutdown, @@ -2091,7 +2265,7 @@ def _confirm_exit(self): # since this might be called from a signal handler self.stop(from_signal=True) return - print(self.running_server_info()) + info(self.running_server_info()) yes = _i18n("y") no = _i18n("n") sys.stdout.write(_i18n("Shutdown this Jupyter server (%s/[%s])? ") % (yes, no)) @@ -2106,27 +2280,32 @@ def _confirm_exit(self): self.stop(from_signal=True) return else: - print(_i18n("No answer for 5s:"), end=" ") - print(_i18n("resuming operation...")) + if self._stopping: + # don't show 'no answer' if we're actually stopping, + # e.g. 
ctrl-C ctrl-C + return + info(_i18n("No answer for 5s:")) + info(_i18n("resuming operation...")) # no answer, or answer is no: # set it back to original SIGINT handler # use IOLoop.add_callback because signal.signal must be called # from main thread self.io_loop.add_callback_from_signal(self._restore_sigint_handler) - def _signal_stop(self, sig, frame): + def _signal_stop(self, sig: t.Any, frame: t.Any) -> None: + """Handle a stop signal.""" self.log.critical(_i18n("received signal %s, stopping"), sig) self.stop(from_signal=True) - def _signal_info(self, sig, frame): - print(self.running_server_info()) + def _signal_info(self, sig: t.Any, frame: t.Any) -> None: + """Handle an info signal.""" + self.log.info(self.running_server_info()) - def init_components(self): + def init_components(self) -> None: """Check the components submodule, and warn if it's unclean""" # TODO: this should still check, but now we use bower, not git submodule - pass - def find_server_extensions(self): + def find_server_extensions(self) -> None: """ Searches Jupyter paths for jpserver_extensions. """ @@ -2149,7 +2328,7 @@ def find_server_extensions(self): self.config.ServerApp.jpserver_extensions.update({modulename: enabled}) self.jpserver_extensions.update({modulename: enabled}) - def init_server_extensions(self): + def init_server_extensions(self) -> None: """ If an extension's metadata includes an 'app' key, the value must be a subclass of ExtensionApp. An instance @@ -2162,7 +2341,7 @@ def init_server_extensions(self): self.extension_manager.from_jpserver_extensions(self.jpserver_extensions) self.extension_manager.link_all_extensions() - def load_server_extensions(self): + def load_server_extensions(self) -> None: """Load any extensions specified by config. 
Import the module, then call the load_jupyter_server_extension function, @@ -2172,7 +2351,7 @@ def load_server_extensions(self): """ self.extension_manager.load_all_extensions() - def init_mime_overrides(self): + def init_mime_overrides(self) -> None: # On some Windows machines, an application has registered incorrect # mimetypes in the registry. # Tornado uses this when serving .css and .js files, causing browsers to @@ -2188,57 +2367,58 @@ def init_mime_overrides(self): # for python <3.8 mimetypes.add_type("application/wasm", ".wasm") - def shutdown_no_activity(self): + def shutdown_no_activity(self) -> None: """Shutdown server on timeout when there are no kernels or terminals.""" km = self.kernel_manager if len(km) != 0: return # Kernels still running - if self.terminals_available: - term_mgr = self.web_app.settings["terminal_manager"] - if term_mgr.terminals: - return # Terminals still running + if self.extension_manager.any_activity(): + return seconds_since_active = (utcnow() - self.web_app.last_activity()).total_seconds() self.log.debug("No activity for %d seconds.", seconds_since_active) if seconds_since_active > self.shutdown_no_activity_timeout: self.log.info( - "No kernels or terminals for %d seconds; shutting down.", + "No kernels for %d seconds; shutting down.", seconds_since_active, ) self.stop() - def init_shutdown_no_activity(self): + def init_shutdown_no_activity(self) -> None: + """Initialize a shutdown on no activity.""" if self.shutdown_no_activity_timeout > 0: self.log.info( - "Will shut down after %d seconds with no kernels or terminals.", + "Will shut down after %d seconds with no kernels.", self.shutdown_no_activity_timeout, ) pc = ioloop.PeriodicCallback(self.shutdown_no_activity, 60000) pc.start() @property - def http_server(self): + def http_server(self) -> httpserver.HTTPServer: """An instance of Tornado's HTTPServer class for the Server Web Application.""" try: return self._http_server - except AttributeError as e: - raise 
AttributeError( + except AttributeError: + msg = ( "An HTTPServer instance has not been created for the " "Server Web Application. To create an HTTPServer for this " "application, call `.init_httpserver()`." - ) from e + ) + raise AttributeError(msg) from None - def init_httpserver(self): + def init_httpserver(self) -> None: """Creates an instance of a Tornado HTTPServer for the Server Web Application and sets the http_server attribute. """ # Check that a web_app has been initialized before starting a server. if not hasattr(self, "web_app"): - raise AttributeError( + msg = ( "A tornado web application has not been initialized. " "Try calling `.init_webapp()` first." ) + raise AttributeError(msg) # Create an instance of the server. self._http_server = httpserver.HTTPServer( @@ -2249,7 +2429,14 @@ def init_httpserver(self): max_buffer_size=self.max_buffer_size, ) - success = self._bind_http_server() + # binding sockets must be called from inside an event loop + if not self.sock: + self._find_http_port() + self.io_loop.add_callback(self._bind_http_server) + + def _bind_http_server(self) -> None: + """Bind our http server.""" + success = self._bind_http_server_unix() if self.sock else self._bind_http_server_tcp() if not success: self.log.critical( _i18n( @@ -2259,10 +2446,8 @@ def init_httpserver(self): ) self.exit(1) - def _bind_http_server(self): - return self._bind_http_server_unix() if self.sock else self._bind_http_server_tcp() - - def _bind_http_server_unix(self): + def _bind_http_server_unix(self) -> bool: + """Bind an http server on unix.""" if unix_socket_in_use(self.sock): self.log.warning(_i18n("The socket %s is already in use.") % self.sock) return False @@ -2282,11 +2467,19 @@ def _bind_http_server(self): else: return True - def _bind_http_server_tcp(self): - success = None + def _bind_http_server_tcp(self) -> bool: + """Bind a tcp server.""" + self.http_server.listen(self.port, self.ip) + return True + + def _find_http_port(self) -> None: + """Find an 
available http port.""" + success = False + port = self.port for port in random_ports(self.port, self.port_retries + 1): try: - self.http_server.listen(port, self.ip) + sockets = bind_sockets(port, self.ip) + sockets[0].close() except OSError as e: if e.errno == errno.EADDRINUSE: if self.port_retries: @@ -2296,17 +2489,16 @@ def _bind_http_server_tcp(self): else: self.log.info(_i18n("The port %i is already in use.") % port) continue - elif e.errno in ( + if e.errno in ( errno.EACCES, getattr(errno, "WSAEACCES", errno.EACCES), ): self.log.warning(_i18n("Permission to listen on port %i denied.") % port) continue - else: - raise + raise else: - self.port = port success = True + self.port = port break if not success: if self.port_retries: @@ -2325,10 +2517,9 @@ def _bind_http_server_tcp(self): % port ) self.exit(1) - return success @staticmethod - def _init_asyncio_patch(): + def _init_asyncio_patch() -> None: """set default asyncio policy to be compatible with tornado Tornado 6.0 is not compatible with default asyncio @@ -2343,10 +2534,7 @@ def _init_asyncio_patch(): import asyncio try: - from asyncio import ( - WindowsProactorEventLoopPolicy, - WindowsSelectorEventLoopPolicy, - ) + from asyncio import WindowsProactorEventLoopPolicy, WindowsSelectorEventLoopPolicy except ImportError: pass # not affected @@ -2358,11 +2546,11 @@ def _init_asyncio_patch(): @catch_config_error def initialize( self, - argv=None, - find_extensions=True, - new_httpserver=True, - starter_extension=None, - ): + argv: t.Optional[list[str]] = None, + find_extensions: bool = True, + new_httpserver: bool = True, + starter_extension: t.Any = None, + ) -> None: """Initialize the Server application class, configurables, web application, and http server. Parameters @@ -2371,7 +2559,7 @@ def initialize( CLI arguments to parse. find_extensions : bool If True, find and load extensions listed in Jupyter config paths. 
If False, - only load extensions that are passed to ServerApp directy through + only load extensions that are passed to ServerApp directly through the `argv`, `config`, or `jpserver_extensions` arguments. new_httpserver : bool If True, a tornado HTTPServer instance will be created and configured for the Server Web @@ -2386,11 +2574,16 @@ def initialize( super().initialize(argv=argv) if self._dispatching: return + # initialize io loop as early as possible, + # so configurables, extensions may reference the event loop + self.init_ioloop() + # Then, use extensions' config loading mechanism to # update config. ServerApp config takes precedence. if find_extensions: self.find_server_extensions() self.init_logging() + self.init_event_logger() self.init_server_extensions() # Special case the starter extension and load @@ -2409,68 +2602,49 @@ def initialize( self.init_configurables() self.init_components() self.init_webapp() - self.init_terminals() self.init_signal() - self.init_ioloop() self.load_server_extensions() self.init_mime_overrides() self.init_shutdown_no_activity() if new_httpserver: self.init_httpserver() - async def cleanup_kernels(self): + async def cleanup_kernels(self) -> None: """Shutdown all kernels. The kernels will shutdown themselves when this process no longer exists, but explicit shutdown allows the KernelManagers to cleanup the connection files. """ + if not getattr(self, "kernel_manager", None): + return n_kernels = len(self.kernel_manager.list_kernel_ids()) kernel_msg = trans.ngettext( "Shutting down %d kernel", "Shutting down %d kernels", n_kernels ) self.log.info(kernel_msg % n_kernels) - await run_sync_in_loop(self.kernel_manager.shutdown_all()) + await ensure_async(self.kernel_manager.shutdown_all()) - async def cleanup_terminals(self): - """Shutdown all terminals. - - The terminals will shutdown themselves when this process no longer exists, - but explicit shutdown allows the TerminalManager to cleanup. 
- """ - if not self.terminals_available: - return - - terminal_manager = self.web_app.settings["terminal_manager"] - n_terminals = len(terminal_manager.list()) - terminal_msg = trans.ngettext( - "Shutting down %d terminal", "Shutting down %d terminals", n_terminals - ) - self.log.info(terminal_msg % n_terminals) - await run_sync_in_loop(terminal_manager.terminate_all()) - - async def cleanup_extensions(self): + async def cleanup_extensions(self) -> None: """Call shutdown hooks in all extensions.""" + if not getattr(self, "extension_manager", None): + return n_extensions = len(self.extension_manager.extension_apps) extension_msg = trans.ngettext( "Shutting down %d extension", "Shutting down %d extensions", n_extensions ) self.log.info(extension_msg % n_extensions) - await run_sync_in_loop(self.extension_manager.stop_all_extensions()) + await ensure_async(self.extension_manager.stop_all_extensions()) - def running_server_info(self, kernel_count=True): - "Return the current working directory and the server url information" - info = self.contents_manager.info_string() + "\n" + def running_server_info(self, kernel_count: bool = True) -> str: + """Return the current working directory and the server url information""" + info = t.cast(str, self.contents_manager.info_string()) + "\n" if kernel_count: n_kernels = len(self.kernel_manager.list_kernel_ids()) kernel_msg = trans.ngettext("%d active kernel", "%d active kernels", n_kernels) info += kernel_msg % n_kernels info += "\n" # Format the info so that the URL fits on a single line in 80 char display - info += _i18n( - "Jupyter Server {version} is running at:\n{url}".format( - version=ServerApp.version, url=self.display_url - ) - ) + info += _i18n(f"Jupyter Server {ServerApp.version} is running at:\n{self.display_url}") if self.gateway_config.gateway_enabled: info += ( _i18n("\nKernels will be managed by the Gateway server running at:\n%s") @@ -2478,7 +2652,7 @@ def running_server_info(self, kernel_count=True): ) return info 
- def server_info(self): + def server_info(self) -> dict[str, t.Any]: """Return a JSONable dict of information about this server.""" return { "url": self.connection_url, @@ -2487,22 +2661,22 @@ def server_info(self): "sock": self.sock, "secure": bool(self.certfile), "base_url": self.base_url, - "token": self.token, + "token": self.identity_provider.token, "root_dir": os.path.abspath(self.root_dir), "password": bool(self.password), "pid": os.getpid(), "version": ServerApp.version, } - def write_server_info_file(self): + def write_server_info_file(self) -> None: """Write the result of server_info() to the JSON file info_file.""" try: with secure_write(self.info_file) as f: json.dump(self.server_info(), f, indent=2, sort_keys=True) except OSError as e: - self.log.error(_i18n("Failed to write server-info to %s: %s"), self.info_file, e) + self.log.error(_i18n("Failed to write server-info to %s: %r"), self.info_file, e) - def remove_server_info_file(self): + def remove_server_info_file(self) -> None: """Remove the jpserver-.json file created for this server. Ignores the error raised when the file has already been removed. @@ -2513,7 +2687,7 @@ def remove_server_info_file(self): if e.errno != errno.ENOENT: raise - def _resolve_file_to_run_and_root_dir(self): + def _resolve_file_to_run_and_root_dir(self) -> str: """Returns a relative path from file_to_run to root_dir. If root_dir and file_to_run are incompatible, i.e. on different subtrees, @@ -2543,17 +2717,19 @@ def _resolve_file_to_run_and_root_dir(self): "is on the same path as `root_dir`." 
) self.exit(1) + return "" - def _write_browser_open_file(self, url, fh): - if self.token: - url = url_concat(url, {"token": self.token}) + def _write_browser_open_file(self, url: str, fh: t.Any) -> None: + """Write the browser open file.""" + if self.identity_provider.token: + url = url_concat(url, {"token": self.identity_provider.token}) url = url_path_join(self.connection_url, url) jinja2_env = self.web_app.settings["jinja2_env"] template = jinja2_env.get_template("browser-open.html") fh.write(template.render(open_url=url, base_url=self.base_url)) - def write_browser_open_files(self): + def write_browser_open_files(self) -> None: """Write an `browser_open_file` and `browser_open_file_to_run` files This can be used to open a file directly in a browser. @@ -2574,7 +2750,7 @@ def write_browser_open_files(self): with open(self.browser_open_file_to_run, "w", encoding="utf-8") as f: self._write_browser_open_file(file_open_url, f) - def write_browser_open_file(self): + def write_browser_open_file(self) -> None: """Write an jpserver--open.html file This can be used to open the notebook in a browser @@ -2585,7 +2761,7 @@ def write_browser_open_file(self): with open(self.browser_open_file, "w", encoding="utf-8") as f: self._write_browser_open_file(open_url, f) - def remove_browser_open_files(self): + def remove_browser_open_files(self) -> None: """Remove the `browser_open_file` and `browser_open_file_to_run` files created for this server. @@ -2598,7 +2774,7 @@ def remove_browser_open_files(self): if e.errno != errno.ENOENT: raise - def remove_browser_open_file(self): + def remove_browser_open_file(self) -> None: """Remove the jpserver--open.html file created for this server. Ignores the error raised when the file has already been removed. 
@@ -2609,14 +2785,15 @@ def remove_browser_open_file(self): if e.errno != errno.ENOENT: raise - def _prepare_browser_open(self): + def _prepare_browser_open(self) -> tuple[str, t.Optional[str]]: + """Prepare to open the browser.""" if not self.use_redirect_file: uri = self.default_url[len(self.base_url) :] - if self.token: - uri = url_concat(uri, {"token": self.token}) + if self.identity_provider.token: + uri = url_concat(uri, {"token": self.identity_provider.token}) - if self.file_to_run: + if self.file_to_run: # noqa: SIM108 # Create a separate, temporary open-browser-file # pointing at a specific file. open_file = self.browser_open_file_to_run @@ -2631,11 +2808,16 @@ def _prepare_browser_open(self): return assembled_url, open_file - def launch_browser(self): + def launch_browser(self) -> None: + """Launch the browser.""" + # Deferred import for environments that do not have + # the webbrowser module. + import webbrowser + try: browser = webbrowser.get(self.browser or None) except webbrowser.Error as e: - self.log.warning(_i18n("No web browser found: %s.") % e) + self.log.warning(_i18n("No web browser found: %r.") % e) browser = None if not browser: @@ -2644,11 +2826,13 @@ def launch_browser(self): assembled_url, _ = self._prepare_browser_open() def target(): + assert browser is not None browser.open(assembled_url, new=self.webbrowser_open_new) threading.Thread(target=target).start() - def start_app(self): + def start_app(self) -> None: + """Start the Jupyter Server application.""" super().start() if not self.allow_root: @@ -2682,13 +2866,15 @@ def start_app(self): ) self.write_server_info_file() - self.write_browser_open_files() + + if not self.no_browser_open_file: + self.write_browser_open_files() # Handle the browser opening. 
if self.open_browser and not self.sock: self.launch_browser() - if self.token and self._token_generated: + if self.identity_provider.token and self.identity_provider.token_generated: # log full URL with generated token, so there's a copy/pasteable link # with auth info. if self.sock: @@ -2700,27 +2886,35 @@ def start_app(self): "", ( "UNIX sockets are not browser-connectable, but you can tunnel to " - "the instance via e.g.`ssh -L 8888:%s -N user@this_host` and then " - "open e.g. %s in a browser." - ) - % (self.sock, self.connection_url), + "the instance via e.g.`ssh -L 8888:{} -N user@this_host` and then " + "open e.g. {} in a browser." + ).format(self.sock, self.connection_url), ] ) ) else: - self.log.critical( - "\n".join( - [ - "\n", + if self.no_browser_open_file: + message = [ + "\n", + _i18n("To access the server, copy and paste one of these URLs:"), + " %s" % self.display_url, + ] + else: + message = [ + "\n", + _i18n( "To access the server, open this file in a browser:", - " %s" % urljoin("file:", pathname2url(self.browser_open_file)), + ), + " %s" % urljoin("file:", pathname2url(self.browser_open_file)), + _i18n( "Or copy and paste one of these URLs:", - " %s" % self.display_url, - ] - ) - ) + ), + " %s" % self.display_url, + ] - async def _cleanup(self): + self.log.critical("\n".join(message)) + + async def _cleanup(self) -> None: """General cleanup of files, extensions and kernels created by this instance ServerApp. """ @@ -2728,9 +2922,28 @@ async def _cleanup(self): self.remove_browser_open_files() await self.cleanup_extensions() await self.cleanup_kernels() - await self.cleanup_terminals() + try: + await self.kernel_websocket_connection_class.close_all() # type:ignore[attr-defined] + except AttributeError: + # This can happen in two different scenarios: + # + # 1. During tests, where the _cleanup method is invoked without + # the corresponding initialize method having been invoked. + # 2. 
If the provided `kernel_websocket_connection_class` does not + # implement the `close_all` class method. + # + # In either case, we don't need to do anything and just want to treat + # the raised error as a no-op. + pass + if getattr(self, "kernel_manager", None): + self.kernel_manager.__del__() + if getattr(self, "session_manager", None): + self.session_manager.close() + if hasattr(self, "http_server"): + # Stop a server if its set. + self.http_server.stop() - def start_ioloop(self): + def start_ioloop(self) -> None: """Start the IO Loop.""" if sys.platform.startswith("win"): # add no-op to wake every 5s @@ -2742,11 +2955,11 @@ def start_ioloop(self): except KeyboardInterrupt: self.log.info(_i18n("Interrupted...")) - def init_ioloop(self): + def init_ioloop(self) -> None: """init self.io_loop so that an extension can use it by io_loop.call_later() to create background tasks""" self.io_loop = ioloop.IOLoop.current() - def start(self): + def start(self) -> None: """Start the Jupyter server app, after initialization This method takes no arguments so all configuration and initialization @@ -2754,14 +2967,17 @@ def start(self): self.start_app() self.start_ioloop() - async def _stop(self): + async def _stop(self) -> None: """Cleanup resources and stop the IO Loop.""" await self._cleanup() - self.io_loop.stop() + if getattr(self, "io_loop", None): + self.io_loop.stop() - def stop(self, from_signal=False): + def stop(self, from_signal: bool = False) -> None: """Cleanup resources and stop the server.""" - if hasattr(self, "_http_server"): + # signal that stopping has begun + self._stopping = True + if hasattr(self, "http_server"): # Stop a server if its set. 
self.http_server.stop() if getattr(self, "io_loop", None): @@ -2773,7 +2989,9 @@ def stop(self, from_signal=False): self.io_loop.add_callback(self._stop) -def list_running_servers(runtime_dir=None, log=None): +def list_running_servers( + runtime_dir: t.Optional[str] = None, log: t.Optional[logging.Logger] = None +) -> t.Generator[t.Any, None, None]: """Iterate over the server info files of running Jupyter servers. Given a runtime directory, find jpserver-* files in the security directory, @@ -2790,7 +3008,11 @@ def list_running_servers(runtime_dir=None, log=None): for file_name in os.listdir(runtime_dir): if re.match("jpserver-(.+).json", file_name): with open(os.path.join(runtime_dir, file_name), encoding="utf-8") as f: - info = json.load(f) + # Handle race condition where file is being written. + try: + info = json.load(f) + except json.JSONDecodeError: + continue # Simple check whether that process is really still running # Also remove leftover files from IPython 2.x without a pid field diff --git a/jupyter_server/services/api/api.yaml b/jupyter_server/services/api/api.yaml index 844831e045..5ee5c416bd 100644 --- a/jupyter_server/services/api/api.yaml +++ b/jupyter_server/services/api/api.yaml @@ -33,6 +33,16 @@ parameters: in: path description: file path type: string + permissions: + name: permissions + type: string + required: false + in: query + description: | + JSON-serialized dictionary of `{"resource": ["action",]}` + (dict of lists of strings) to check. + The same dictionary structure will be returned, + containing only the actions for which the user is authorized. checkpoint_id: name: checkpoint_id required: true @@ -53,6 +63,22 @@ parameters: type: string paths: + /api/: + get: + summary: Get the Jupyter Server version + description: | + This endpoint returns only the Jupyter Server version. + It does not require any authentication. 
+ responses: + 200: + description: Jupyter Server version information + schema: + type: object + properties: + version: + type: string + description: The Jupyter Server version number as a string. + /api/contents/{path}: parameters: - $ref: "#/parameters/path" @@ -80,6 +106,10 @@ paths: in: query description: "Return content (0 for no content, 1 for return content)" type: integer + - name: hash + in: query + description: "May return hash hexdigest string of content and the hash algorithm (0 for no hash - default, 1 for return hash). It may be ignored by the content manager." + type: integer responses: 404: description: No item found @@ -578,7 +608,7 @@ paths: - terminals responses: 200: - description: Succesfully created a new terminal + description: Successfully created a new terminal schema: $ref: "#/definitions/Terminal" 403: @@ -611,12 +641,47 @@ paths: - $ref: "#/parameters/terminal_id" responses: 204: - description: Succesfully deleted terminal session + description: Successfully deleted terminal session 403: description: Forbidden to access 404: description: Not found - + /api/me: + get: + summary: | + Get the identity of the currently authenticated user. + If present, a `permissions` argument may be specified + to check what actions the user currently is authorized to take. + tags: + - identity + parameters: + - $ref: "#/parameters/permissions" + responses: + 200: + description: The user's identity and permissions + schema: + type: object + properties: + identity: + $ref: "#/definitions/Identity" + permissions: + $ref: "#/definitions/Permissions" + example: + identity: + username: minrk + name: Min Ragan-Kelley + display_name: Min RK + initials: MRK + avatar_url: null + color: null + permissions: + contents: + - read + - write + kernels: + - read + - write + - execute /api/status: get: summary: Get the current status/activity of the server. @@ -663,6 +728,53 @@ definitions: type: number description: | The total number of running kernels. 
+ Identity: + description: The identity of the currently authenticated user + properties: + username: + type: string + description: | + Unique string identifying the user + name: + type: string + description: | + For-humans name of the user. + May be the same as `username` in systems where + only usernames are available. + display_name: + type: string + description: | + Alternate rendering of name for display. + Often the same as `name`. + initials: + type: string + description: | + Short string of initials. + Initials should not be derived automatically due to localization issues. + May be `null` if unavailable. + avatar_url: + type: string + description: | + URL of an avatar to be used for the user. + May be `null` if unavailable. + color: + type: string + description: | + A CSS color string to use as a preferred color, + such as for collaboration cursors. + May be `null` if unavailable. + Permissions: + type: object + description: | + A dict of the form: `{"resource": ["action",]}` + containing only the AUTHORIZED subset of resource+actions + from the permissions specified in the request. + If no permission checks were made in the request, + this will be empty. + additionalProperties: + type: array + items: + type: string KernelSpec: description: Kernel spec (contents of kernel.json) properties: @@ -777,7 +889,7 @@ definitions: kernel: $ref: "#/definitions/Kernel" Contents: - description: "A contents object. The content and format keys may be null if content is not contained. If type is 'file', then the mimetype will be null." + description: "A contents object. The content and format keys may be null if content is not contained. The hash may be null if hash is not required. If type is 'file', then the mimetype will be null." 
type: object required: - type @@ -826,6 +938,12 @@ definitions: format: type: string description: Format of content (one of null, 'text', 'base64', 'json') + hash: + type: string + description: "[optional] The hexdigest hash string of content, if requested (otherwise null). It cannot be null if hash_algorithm is defined." + hash_algorithm: + type: string + description: "[optional] The algorithm used to produce the hash, if requested (otherwise null). It cannot be null if hash is defined." Checkpoints: description: A checkpoint object. type: object diff --git a/jupyter_server/services/api/handlers.py b/jupyter_server/services/api/handlers.py index 1c0cca5e19..efb361186c 100644 --- a/jupyter_server/services/api/handlers.py +++ b/jupyter_server/services/api/handlers.py @@ -3,12 +3,13 @@ # Distributed under the terms of the Modified BSD License. import json import os +from typing import Any, Dict, List +from jupyter_core.utils import ensure_async from tornado import web from jupyter_server._tz import isoformat, utcfromtimestamp -from jupyter_server.auth import authorized -from jupyter_server.utils import ensure_async +from jupyter_server.auth.decorator import authorized from ...base.handlers import APIHandler, JupyterHandler @@ -16,22 +17,28 @@ class APISpecHandler(web.StaticFileHandler, JupyterHandler): + """A spec handler for the REST API.""" + auth_resource = AUTH_RESOURCE def initialize(self): + """Initialize the API spec handler.""" web.StaticFileHandler.initialize(self, path=os.path.dirname(__file__)) @web.authenticated @authorized def get(self): + """Get the API spec.""" self.log.warning("Serving api spec (experimental, incomplete)") return web.StaticFileHandler.get(self, "api.yaml") def get_content_type(self): + """Get the content type.""" return "text/x-yaml" class APIStatusHandler(APIHandler): + """An API status handler.""" auth_resource = AUTH_RESOURCE _track_activity = False @@ -39,13 +46,14 @@ class APIStatusHandler(APIHandler): @web.authenticated 
class IdentityHandler(APIHandler):
    """Get the current user's identity model"""

    @web.authenticated
    def get(self):
        """Get the identity model.

        Optionally checks a requested set of permissions (passed as the
        ``permissions`` query argument, a JSON dict of
        ``{"resource": ["action",]}``) against the server's authorizer and
        reports back only the authorized subset.
        """
        permissions_json: str = self.get_argument("permissions", "")
        bad_permissions_msg = f'permissions should be a JSON dict of {{"resource": ["action",]}}, got {permissions_json!r}'

        # Decode the optional permissions argument; absent means "check nothing".
        if not permissions_json:
            permissions_to_check = {}
        else:
            try:
                permissions_to_check = json.loads(permissions_json)
            except ValueError as e:
                raise web.HTTPError(400, bad_permissions_msg) from e
            if not isinstance(permissions_to_check, dict):
                raise web.HTTPError(400, bad_permissions_msg)

        user = self.current_user
        permissions: Dict[str, List[str]] = {}

        for resource, actions in permissions_to_check.items():
            # Validate shape before consulting the authorizer for this entry.
            valid_shape = isinstance(resource, str) and isinstance(actions, list)
            if valid_shape:
                valid_shape = all(isinstance(action, str) for action in actions)
            if not valid_shape:
                raise web.HTTPError(400, bad_permissions_msg)

            # Keep only the actions the authorizer allows for this resource.
            permissions[resource] = [
                action
                for action in actions
                if self.authorizer.is_authorized(self, user=user, resource=resource, action=action)
            ]

        identity: Dict[str, Any] = self.identity_provider.identity_model(user)
        model = {
            "identity": identity,
            "permissions": permissions,
        }
        self.write(json.dumps(model))
# Distributed under the terms of the Modified BSD License. import os.path +import typing as t from jupyter_core.paths import jupyter_config_dir, jupyter_config_path from traitlets import Instance, List, Unicode, default, observe @@ -22,7 +23,7 @@ class ConfigManager(LoggingConfigurable): def get(self, section_name): """Get the config from all config sections.""" - config = {} + config: t.Dict[str, t.Any] = {} # step through back to front, to ensure front of the list is top priority for p in self.read_config_path[::-1]: cm = BaseJSONConfigManager(config_dir=p) diff --git a/jupyter_server/services/contents/checkpoints.py b/jupyter_server/services/contents/checkpoints.py index 09ef4a8e81..e251f7b232 100644 --- a/jupyter_server/services/contents/checkpoints.py +++ b/jupyter_server/services/contents/checkpoints.py @@ -22,23 +22,23 @@ class Checkpoints(LoggingConfigurable): def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError def rename_checkpoint(self, checkpoint_id, old_path, new_path): """Rename a single checkpoint from old_path to new_path.""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError def delete_checkpoint(self, checkpoint_id, path): """delete a checkpoint for a file""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError def list_checkpoints(self, path): """Return a list of checkpoints for a given file""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError def rename_all_checkpoints(self, old_path, new_path): """Rename all checkpoints for old_path to new_path.""" @@ -75,13 +75,13 @@ class GenericCheckpointsMixin: def create_checkpoint(self, 
def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
    """Restore a checkpoint.

    Looks up the target's type via the contents manager, loads the
    checkpoint model of the matching kind, and saves it back over the
    current file. Raises a 500 error for any other content type.
    """
    target_type = contents_mgr.get(path, content=False)["type"]
    # Dispatch to the type-specific checkpoint loader.
    loaders = {
        "notebook": self.get_notebook_checkpoint,
        "file": self.get_file_checkpoint,
    }
    loader = loaders.get(target_type)
    if loader is None:
        raise HTTPError(500, "Unexpected type %s" % target_type)
    contents_mgr.save(loader(checkpoint_id, path), path)
- Returns a dict of the form: - { - 'type': 'notebook', - 'content': , - } + Returns a dict of the form:: + + { + 'type': 'notebook', + 'content': , + } """ - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError class AsyncCheckpoints(Checkpoints): @@ -147,23 +149,23 @@ class AsyncCheckpoints(Checkpoints): async def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def rename_checkpoint(self, checkpoint_id, old_path, new_path): """Rename a single checkpoint from old_path to new_path.""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def delete_checkpoint(self, checkpoint_id, path): """delete a checkpoint for a file""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def list_checkpoints(self, path): """Return a list of checkpoints for a given file""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def rename_all_checkpoints(self, old_path, new_path): """Rename all checkpoints for old_path to new_path.""" @@ -184,30 +186,31 @@ class AsyncGenericCheckpointsMixin(GenericCheckpointsMixin): async def create_checkpoint(self, contents_mgr, path): model = await contents_mgr.get(path, content=True) - type = model["type"] - if type == "notebook": + type_ = model["type"] + if type_ == "notebook": return await self.create_notebook_checkpoint( model["content"], path, ) - elif type == "file": + elif type_ == "file": return await self.create_file_checkpoint( model["content"], model["format"], path, ) else: - raise HTTPError(500, "Unexpected type %s" % type) + raise HTTPError(500, 
"Unexpected type %s" % type_) async def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" - type = await contents_mgr.get(path, content=False)["type"] - if type == "notebook": + content_model = await contents_mgr.get(path, content=False) + type_ = content_model["type"] + if type_ == "notebook": model = await self.get_notebook_checkpoint(checkpoint_id, path) - elif type == "file": + elif type_ == "file": model = await self.get_file_checkpoint(checkpoint_id, path) else: - raise HTTPError(500, "Unexpected type %s" % type) + raise HTTPError(500, "Unexpected type %s" % type_) await contents_mgr.save(model, path) # Required Methods @@ -216,34 +219,36 @@ async def create_file_checkpoint(self, content, format, path): Returns a checkpoint model for the new checkpoint. """ - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def create_notebook_checkpoint(self, nb, path): """Create a checkpoint of the current state of a file Returns a checkpoint model for the new checkpoint. """ - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def get_file_checkpoint(self, checkpoint_id, path): """Get the content of a checkpoint for a non-notebook file. - Returns a dict of the form: - { - 'type': 'file', - 'content': , - 'format': {'text','base64'}, - } + Returns a dict of the form:: + + { + 'type': 'file', + 'content': , + 'format': {'text','base64'}, + } """ - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def get_notebook_checkpoint(self, checkpoint_id, path): """Get the content of a checkpoint for a notebook. 
- Returns a dict of the form: - { - 'type': 'notebook', - 'content': , - } + Returns a dict of the form:: + + { + 'type': 'notebook', + 'content': , + } """ - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError diff --git a/jupyter_server/services/contents/filecheckpoints.py b/jupyter_server/services/contents/filecheckpoints.py index fb81e47250..522b3bbd01 100644 --- a/jupyter_server/services/contents/filecheckpoints.py +++ b/jupyter_server/services/contents/filecheckpoints.py @@ -43,10 +43,9 @@ class FileCheckpoints(FileManagerMixin, Checkpoints): root_dir = Unicode(config=True) def _root_dir_default(self): - try: - return self.parent.root_dir - except AttributeError: + if not self.parent: return os.getcwd() + return self.parent.root_dir # ContentsManager-dependent checkpoint API def create_checkpoint(self, contents_mgr, path): @@ -108,11 +107,7 @@ def checkpoint_path(self, checkpoint_id, path): parent, name = ("/" + path).rsplit("/", 1) parent = parent.strip("/") basename, ext = os.path.splitext(name) - filename = "{name}-{checkpoint_id}{ext}".format( - name=basename, - checkpoint_id=checkpoint_id, - ext=ext, - ) + filename = f"{basename}-{checkpoint_id}{ext}" os_path = self._get_os_path(path=parent) cp_dir = os.path.join(os_path, self.checkpoint_dir) with self.perm_to_403(): @@ -124,10 +119,10 @@ def checkpoint_model(self, checkpoint_id, os_path): """construct the info dict for a given checkpoint""" stats = os.stat(os_path) last_modified = tz.utcfromtimestamp(stats.st_mtime) - info = dict( - id=checkpoint_id, - last_modified=last_modified, - ) + info = { + "id": checkpoint_id, + "last_modified": last_modified, + } return info # Error Handling @@ -154,10 +149,10 @@ async def checkpoint_model(self, checkpoint_id, os_path): """construct the info dict for a given checkpoint""" stats = await run_sync(os.stat, os_path) last_modified = tz.utcfromtimestamp(stats.st_mtime) - info = dict( - id=checkpoint_id, - 
last_modified=last_modified, - ) + info = { + "id": checkpoint_id, + "last_modified": last_modified, + } return info # ContentsManager-independent checkpoint API @@ -257,7 +252,7 @@ def get_file_checkpoint(self, checkpoint_id, path): if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) - content, format = self._read_file(os_checkpoint_path, format=None) + content, format = self._read_file(os_checkpoint_path, format=None) # type: ignore[misc] return { "type": "file", "content": content, @@ -323,7 +318,7 @@ async def get_file_checkpoint(self, checkpoint_id, path): if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) - content, format = await self._read_file(os_checkpoint_path, format=None) + content, format = await self._read_file(os_checkpoint_path, format=None) # type: ignore[misc] return { "type": "file", "content": content, diff --git a/jupyter_server/services/contents/fileio.py b/jupyter_server/services/contents/fileio.py index d01bfd16dc..19f84f4653 100644 --- a/jupyter_server/services/contents/fileio.py +++ b/jupyter_server/services/contents/fileio.py @@ -3,7 +3,11 @@ """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
+ +from __future__ import annotations + import errno +import hashlib import os import shutil from base64 import decodebytes, encodebytes @@ -13,10 +17,11 @@ import nbformat from anyio.to_thread import run_sync from tornado.web import HTTPError -from traitlets import Bool +from traitlets import Bool, Enum from traitlets.config import Configurable +from traitlets.config.configurable import LoggingConfigurable -from jupyter_server.utils import to_api_path, to_os_path +from jupyter_server.utils import ApiPath, to_api_path, to_os_path def replace_file(src, dst): @@ -104,9 +109,9 @@ def atomic_writing(path, text=True, encoding="utf-8", log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault("newline", "\n") - fileobj = open(path, "w", encoding=encoding, **kwargs) + fileobj = open(path, "w", encoding=encoding, **kwargs) # noqa: SIM115 else: - fileobj = open(path, "wb", **kwargs) + fileobj = open(path, "wb", **kwargs) # noqa: SIM115 try: yield fileobj @@ -152,9 +157,9 @@ def _simple_writing(path, text=True, encoding="utf-8", log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault("newline", "\n") - fileobj = open(path, "w", encoding=encoding, **kwargs) + fileobj = open(path, "w", encoding=encoding, **kwargs) # noqa: SIM115 else: - fileobj = open(path, "wb", **kwargs) + fileobj = open(path, "wb", **kwargs) # noqa: SIM115 try: yield fileobj @@ -165,7 +170,7 @@ def _simple_writing(path, text=True, encoding="utf-8", log=None, **kwargs): fileobj.close() -class FileManagerMixin(Configurable): +class FileManagerMixin(LoggingConfigurable, Configurable): """ Mixin for ContentsAPI classes that interact with the filesystem. @@ -187,16 +192,22 @@ class FileManagerMixin(Configurable): True, config=True, help="""By default notebooks are saved on disk on a temporary file and then if succefully written, it replaces the old ones. 
- This procedure, namely 'atomic_writing', causes some bugs on file system whitout operation order enforcement (like some networked fs). + This procedure, namely 'atomic_writing', causes some bugs on file system without operation order enforcement (like some networked fs). If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota )""", ) + hash_algorithm = Enum( # type: ignore[call-overload] + hashlib.algorithms_available, + default_value="sha256", + config=True, + help="Hash algorithm to use for file content, support by hashlib", + ) + @contextmanager def open(self, os_path, *args, **kwargs): """wrapper around io.open that turns permission errors into 403""" - with self.perm_to_403(os_path): - with open(os_path, *args, **kwargs) as f: - yield f + with self.perm_to_403(os_path), open(os_path, *args, **kwargs) as f: + yield f @contextmanager def atomic_writing(self, os_path, *args, **kwargs): @@ -204,11 +215,12 @@ def atomic_writing(self, os_path, *args, **kwargs): Depending on flag 'use_atomic_writing', the wrapper perform an actual atomic writing or simply writes the file (whatever an old exists or not)""" with self.perm_to_403(os_path): + kwargs["log"] = self.log if self.use_atomic_writing: - with atomic_writing(os_path, *args, log=self.log, **kwargs) as f: + with atomic_writing(os_path, *args, **kwargs) as f: yield f else: - with _simple_writing(os_path, *args, log=self.log, **kwargs) as f: + with _simple_writing(os_path, *args, **kwargs) as f: yield f @contextmanager @@ -223,7 +235,7 @@ def perm_to_403(self, os_path=""): # but nobody should be doing that anyway. 
if not os_path: os_path = e.filename or "unknown file" - path = to_api_path(os_path, root=self.root_dir) + path = to_api_path(os_path, root=self.root_dir) # type:ignore[attr-defined] raise HTTPError(403, "Permission denied: %s" % path) from e else: raise @@ -240,53 +252,75 @@ def _get_os_path(self, path): Parameters ---------- - path : string + path : str The relative API path to the named file. Returns ------- - path : string + path : str Native, absolute OS path to for a file. Raises ------ 404: if path is outside root """ - root = os.path.abspath(self.root_dir) - os_path = to_os_path(path, root) + self.log.debug("Reading path from disk: %s", path) + root = os.path.abspath(self.root_dir) # type:ignore[attr-defined] + # to_os_path is not safe if path starts with a drive, since os.path.join discards first part + if os.path.splitdrive(path)[0]: + raise HTTPError(404, "%s is not a relative API path" % path) + os_path = to_os_path(ApiPath(path), root) + # validate os path + # e.g. "foo\0" raises ValueError: embedded null byte + try: + os.lstat(os_path) + except OSError: + # OSError could be FileNotFound, PermissionError, etc. 
+ # those should raise (or not) elsewhere + pass + except ValueError: + raise HTTPError(404, f"{path} is not a valid path") from None + if not (os.path.abspath(os_path) + os.path.sep).startswith(root): raise HTTPError(404, "%s is outside root contents directory" % path) return os_path - def _read_notebook(self, os_path, as_version=4, capture_validation_error=None): + def _read_notebook( + self, os_path, as_version=4, capture_validation_error=None, raw: bool = False + ): """Read a notebook from an os path.""" - with self.open(os_path, "r", encoding="utf-8") as f: - try: - return nbformat.read( - f, as_version=as_version, capture_validation_error=capture_validation_error - ) - except Exception as e: - e_orig = e - - # If use_atomic_writing is enabled, we'll guess that it was also - # enabled when this notebook was written and look for a valid - # atomic intermediate. - tmp_path = path_to_intermediate(os_path) - - if not self.use_atomic_writing or not os.path.exists(tmp_path): - raise HTTPError( - 400, - f"Unreadable Notebook: {os_path} {e_orig!r}", - ) + answer = self._read_file(os_path, "text", raw=raw) + + try: + nb = nbformat.reads( + answer[0], + as_version=as_version, + capture_validation_error=capture_validation_error, + ) - # Move the bad file aside, restore the intermediate, and try again. - invalid_file = path_to_invalid(os_path) - replace_file(os_path, invalid_file) - replace_file(tmp_path, os_path) - return self._read_notebook( - os_path, as_version, capture_validation_error=capture_validation_error + return (nb, answer[2]) if raw else nb # type:ignore[misc] + except Exception as e: + e_orig = e + + # If use_atomic_writing is enabled, we'll guess that it was also + # enabled when this notebook was written and look for a valid + # atomic intermediate. 
+ tmp_path = path_to_intermediate(os_path) + + if not self.use_atomic_writing or not os.path.exists(tmp_path): + raise HTTPError( + 400, + f"Unreadable Notebook: {os_path} {e_orig!r}", ) + # Move the bad file aside, restore the intermediate, and try again. + invalid_file = path_to_invalid(os_path) + replace_file(os_path, invalid_file) + replace_file(tmp_path, os_path) + return self._read_notebook( + os_path, as_version, capture_validation_error=capture_validation_error, raw=raw + ) + def _save_notebook(self, os_path, nb, capture_validation_error=None): """Save a notebook to an os_path.""" with self.atomic_writing(os_path, encoding="utf-8") as f: @@ -297,14 +331,46 @@ def _save_notebook(self, os_path, nb, capture_validation_error=None): capture_validation_error=capture_validation_error, ) - def _read_file(self, os_path, format): + def _get_hash(self, byte_content: bytes) -> dict[str, str]: + """Compute the hash hexdigest for the provided bytes. + + The hash algorithm is provided by the `hash_algorithm` attribute. + + Parameters + ---------- + byte_content : bytes + The bytes to hash + + Returns + ------- + A dictionary to be appended to a model {"hash": str, "hash_algorithm": str}. + """ + algorithm = self.hash_algorithm + h = hashlib.new(algorithm) + h.update(byte_content) + return {"hash": h.hexdigest(), "hash_algorithm": algorithm} + + def _read_file( + self, os_path: str, format: str | None, raw: bool = False + ) -> tuple[str | bytes, str] | tuple[str | bytes, str, bytes]: """Read a non-notebook file. - os_path: The path to be read. - format: - If 'text', the contents will be decoded as UTF-8. - If 'base64', the raw bytes contents will be encoded as base64. - If not specified, try to decode as UTF-8, and fall back to base64 + Parameters + ---------- + os_path: str + The path to be read. + format: str + If 'text', the contents will be decoded as UTF-8. + If 'base64', the raw bytes contents will be encoded as base64. 
+ If 'byte', the raw bytes contents will be returned. + If not specified, try to decode as UTF-8, and fall back to base64 + raw: bool + [Optional] If True, will return as third argument the raw bytes content + + Returns + ------- + (content, format, byte_content) It returns the content in the given format + as well as the raw byte content. """ if not os.path.isfile(os_path): raise HTTPError(400, "Cannot read non-file %s" % os_path) @@ -312,11 +378,22 @@ def _read_file(self, os_path, format): with self.open(os_path, "rb") as f: bcontent = f.read() + if format == "byte": + # Not for http response but internal use + return (bcontent, "byte", bcontent) if raw else (bcontent, "byte") + if format is None or format == "text": # Try to interpret as unicode if format is unknown or if unicode # was explicitly requested. try: - return bcontent.decode("utf8"), "text" + return ( + (bcontent.decode("utf8"), "text", bcontent) + if raw + else ( + bcontent.decode("utf8"), + "text", + ) + ) except UnicodeError as e: if format == "text": raise HTTPError( @@ -324,7 +401,14 @@ def _read_file(self, os_path, format): "%s is not UTF-8 encoded" % os_path, reason="bad format", ) from e - return encodebytes(bcontent).decode("ascii"), "base64" + return ( + (encodebytes(bcontent).decode("ascii"), "base64", bcontent) + if raw + else ( + encodebytes(bcontent).decode("ascii"), + "base64", + ) + ) def _save_file(self, os_path, content, format): """Save content of a generic file.""" @@ -358,39 +442,45 @@ async def _copy(self, src, dest): """ await async_copy2_safe(src, dest, log=self.log) - async def _read_notebook(self, os_path, as_version=4, capture_validation_error=None): + async def _read_notebook( + self, os_path, as_version=4, capture_validation_error=None, raw: bool = False + ): """Read a notebook from an os path.""" - with self.open(os_path, "r", encoding="utf-8") as f: - try: - return await run_sync( - partial( - nbformat.read, - as_version=as_version, - 
capture_validation_error=capture_validation_error, - ), - f, - ) - except Exception as e: - e_orig = e - - # If use_atomic_writing is enabled, we'll guess that it was also - # enabled when this notebook was written and look for a valid - # atomic intermediate. - tmp_path = path_to_intermediate(os_path) - - if not self.use_atomic_writing or not os.path.exists(tmp_path): - raise HTTPError( - 400, - f"Unreadable Notebook: {os_path} {e_orig!r}", - ) + answer = await self._read_file(os_path, "text", raw) - # Move the bad file aside, restore the intermediate, and try again. - invalid_file = path_to_invalid(os_path) - await async_replace_file(os_path, invalid_file) - await async_replace_file(tmp_path, os_path) - return await self._read_notebook( - os_path, as_version, capture_validation_error=capture_validation_error + try: + nb = await run_sync( + partial( + nbformat.reads, + as_version=as_version, + capture_validation_error=capture_validation_error, + ), + answer[0], ) + return (nb, answer[2]) if raw else nb # type:ignore[misc] + except Exception as e: + e_orig = e + + # If use_atomic_writing is enabled, we'll guess that it was also + # enabled when this notebook was written and look for a valid + # atomic intermediate. + tmp_path = path_to_intermediate(os_path) + + if not self.use_atomic_writing or not os.path.exists(tmp_path): + raise HTTPError( + 400, + f"Unreadable Notebook: {os_path} {e_orig!r}", + ) + + # Move the bad file aside, restore the intermediate, and try again. 
+ invalid_file = path_to_invalid(os_path) + await async_replace_file(os_path, invalid_file) + await async_replace_file(tmp_path, os_path) + answer = await self._read_notebook( + os_path, as_version, capture_validation_error=capture_validation_error, raw=raw + ) + + return answer async def _save_notebook(self, os_path, nb, capture_validation_error=None): """Save a notebook to an os_path.""" @@ -405,14 +495,27 @@ async def _save_notebook(self, os_path, nb, capture_validation_error=None): f, ) - async def _read_file(self, os_path, format): + async def _read_file( # type: ignore[override] + self, os_path: str, format: str | None, raw: bool = False + ) -> tuple[str | bytes, str] | tuple[str | bytes, str, bytes]: """Read a non-notebook file. - os_path: The path to be read. - format: - If 'text', the contents will be decoded as UTF-8. - If 'base64', the raw bytes contents will be encoded as base64. - If not specified, try to decode as UTF-8, and fall back to base64 + Parameters + ---------- + os_path: str + The path to be read. + format: str + If 'text', the contents will be decoded as UTF-8. + If 'base64', the raw bytes contents will be encoded as base64. + If 'byte', the raw bytes contents will be returned. + If not specified, try to decode as UTF-8, and fall back to base64 + raw: bool + [Optional] If True, will return as third argument the raw bytes content + + Returns + ------- + (content, format, byte_content) It returns the content in the given format + as well as the raw byte content. 
""" if not os.path.isfile(os_path): raise HTTPError(400, "Cannot read non-file %s" % os_path) @@ -420,11 +523,22 @@ async def _read_file(self, os_path, format): with self.open(os_path, "rb") as f: bcontent = await run_sync(f.read) + if format == "byte": + # Not for http response but internal use + return (bcontent, "byte", bcontent) if raw else (bcontent, "byte") + if format is None or format == "text": # Try to interpret as unicode if format is unknown or if unicode # was explicitly requested. try: - return bcontent.decode("utf8"), "text" + return ( + (bcontent.decode("utf8"), "text", bcontent) + if raw + else ( + bcontent.decode("utf8"), + "text", + ) + ) except UnicodeError as e: if format == "text": raise HTTPError( @@ -432,7 +546,11 @@ async def _read_file(self, os_path, format): "%s is not UTF-8 encoded" % os_path, reason="bad format", ) from e - return encodebytes(bcontent).decode("ascii"), "base64" + return ( + (encodebytes(bcontent).decode("ascii"), "base64", bcontent) + if raw + else (encodebytes(bcontent).decode("ascii"), "base64") + ) async def _save_file(self, os_path, content, format): """Save content of a generic file.""" diff --git a/jupyter_server/services/contents/filemanager.py b/jupyter_server/services/contents/filemanager.py index 88aa0e3620..c56a1acc70 100644 --- a/jupyter_server/services/contents/filemanager.py +++ b/jupyter_server/services/contents/filemanager.py @@ -1,52 +1,62 @@ """A contents manager that uses the local file system for storage.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
+from __future__ import annotations + import errno +import math import mimetypes import os +import platform import shutil import stat +import subprocess import sys +import typing as t +import warnings from datetime import datetime +from pathlib import Path import nbformat from anyio.to_thread import run_sync from jupyter_core.paths import exists, is_file_hidden, is_hidden from send2trash import send2trash from tornado import web -from traitlets import Bool, TraitError, Unicode, default, validate +from traitlets import Bool, Int, TraitError, Unicode, default, validate from jupyter_server import _tz as tz from jupyter_server.base.handlers import AuthenticatedFileHandler from jupyter_server.transutils import _i18n +from jupyter_server.utils import to_api_path from .filecheckpoints import AsyncFileCheckpoints, FileCheckpoints from .fileio import AsyncFileManagerMixin, FileManagerMixin -from .manager import AsyncContentsManager, ContentsManager +from .manager import AsyncContentsManager, ContentsManager, copy_pat try: from os.path import samefile except ImportError: - # windows + py2 - from jupyter_server.utils import samefile_simple as samefile + # windows + from jupyter_server.utils import samefile_simple as samefile # type:ignore[assignment] _script_exporter = None class FileContentsManager(FileManagerMixin, ContentsManager): + """A file contents manager.""" root_dir = Unicode(config=True) + max_copy_folder_size_mb = Int(500, config=True, help="The max folder size that can be copied") + @default("root_dir") def _default_root_dir(self): - try: - return self.parent.root_dir - except AttributeError: + if not self.parent: return os.getcwd() + return self.parent.root_dir @validate("root_dir") def _validate_root_dir(self, proposal): - """Do a bit of validation of the root_dir.""" value = proposal["value"] if not os.path.isabs(value): # If we receive a non-absolute path, make it absolute. 
@@ -55,6 +65,36 @@ def _validate_root_dir(self, proposal): raise TraitError("%r is not a directory" % value) return value + @default("preferred_dir") + def _default_preferred_dir(self): + if not self.parent: + return "" + try: + value = self.parent.preferred_dir + if value == self.parent.root_dir: + value = None + except AttributeError: + pass + else: + if value is not None: + warnings.warn( + "ServerApp.preferred_dir config is deprecated in jupyter-server 2.0. Use FileContentsManager.preferred_dir instead", + FutureWarning, + stacklevel=3, + ) + try: + path = Path(value) + return path.relative_to(self.root_dir).as_posix() + except ValueError: + raise TraitError("%s is outside root contents directory" % value) from None + return "" + + @validate("preferred_dir") + def _validate_preferred_dir(self, proposal): + # It should be safe to pass an API path through this method: + proposal["value"] = to_api_path(proposal["value"], self.root_dir) + return super()._validate_preferred_dir(proposal) + @default("checkpoints_class") def _checkpoints_class_default(self): return FileCheckpoints @@ -90,7 +130,7 @@ def is_hidden(self, path): Parameters ---------- - path : string + path : str The path to check. This is an API path (`/` separated, relative to root_dir). @@ -108,7 +148,7 @@ def is_writable(self, path): Parameters ---------- - path : string + path : str The path to check. This is an API path (`/` separated, relative to root_dir). @@ -132,7 +172,7 @@ def file_exists(self, path): Parameters ---------- - path : string + path : str The relative path to the file (with '/' as separator) Returns @@ -151,7 +191,7 @@ def dir_exists(self, path): Parameters ---------- - path : string + path : str The path to check. This is an API path (`/` separated, relative to root_dir). 
@@ -171,7 +211,7 @@ def exists(self, path): Parameters ---------- - path : string + path : str The API path to the file (with '/' as separator) Returns @@ -188,6 +228,12 @@ def _base_model(self, path): os_path = self._get_os_path(path) info = os.lstat(os_path) + four_o_four = "file or directory does not exist: %r" % path + + if not self.allow_hidden and is_hidden(os_path, self.root_dir): + self.log.info("Refusing to serve hidden file or directory %r, via 404 Error", os_path) + raise web.HTTPError(404, four_o_four) + try: # size of file size = info.st_size @@ -222,6 +268,8 @@ def _base_model(self, path): model["mimetype"] = None model["size"] = size model["writable"] = self.is_writable(path) + model["hash"] = None + model["hash_algorithm"] = None return model @@ -236,7 +284,7 @@ def _dir_model(self, path, content=True): if not os.path.isdir(os_path): raise web.HTTPError(404, four_o_four) - elif is_hidden(os_path, self.root_dir) and not self.allow_hidden: + elif not self.allow_hidden and is_hidden(os_path, self.root_dir): self.log.info("Refusing to serve hidden directory %r, via 404 Error", os_path) raise web.HTTPError(404, four_o_four) @@ -250,7 +298,7 @@ def _dir_model(self, path, content=True): try: os_path = os.path.join(os_dir, name) except UnicodeDecodeError as e: - self.log.warning("failed to decode filename '%s': %s", name, e) + self.log.warning("failed to decode filename '%s': %r", name, e) continue try: @@ -260,7 +308,7 @@ def _dir_model(self, path, content=True): if e.errno == errno.ENOENT: self.log.warning("%s doesn't exist", os_path) elif e.errno != errno.EACCES: # Don't provide clues about protected files - self.log.warning("Error stat-ing %s: %s", os_path, e) + self.log.warning("Error stat-ing %s: %r", os_path, e) continue if ( @@ -272,9 +320,10 @@ def _dir_model(self, path, content=True): continue try: - if self.should_list(name): - if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): - contents.append(self.get(path=f"{path}/{name}", 
content=False)) + if self.should_list(name) and ( + self.allow_hidden or not is_file_hidden(os_path, stat_res=st) + ): + contents.append(self.get(path=f"{path}/{name}", content=False)) except OSError as e: # ELOOP: recursive symlink, also don't show failure due to permissions if e.errno not in [errno.ELOOP, errno.EACCES]: @@ -288,7 +337,7 @@ def _dir_model(self, path, content=True): return model - def _file_model(self, path, content=True, format=None): + def _file_model(self, path, content=True, format=None, require_hash=False): """Build a model for a file if content is requested, include the file contents. @@ -297,6 +346,8 @@ def _file_model(self, path, content=True, format=None): If 'text', the contents will be decoded as UTF-8. If 'base64', the raw bytes contents will be encoded as base64. If not specified, try to decode as UTF-8, and fall back to base64 + + if require_hash is true, the model will include 'hash' """ model = self._base_model(path) model["type"] = "file" @@ -304,8 +355,9 @@ def _file_model(self, path, content=True, format=None): os_path = self._get_os_path(path) model["mimetype"] = mimetypes.guess_type(os_path)[0] + bytes_content = None if content: - content, format = self._read_file(os_path, format) + content, format, bytes_content = self._read_file(os_path, format, raw=True) # type: ignore[misc] if model["mimetype"] is None: default_mime = { "text": "text/plain", @@ -318,31 +370,44 @@ def _file_model(self, path, content=True, format=None): format=format, ) + if require_hash: + if bytes_content is None: + bytes_content, _ = self._read_file(os_path, "byte") # type: ignore[assignment,misc] + model.update(**self._get_hash(bytes_content)) # type: ignore[arg-type] + return model - def _notebook_model(self, path, content=True): + def _notebook_model(self, path, content=True, require_hash=False): """Build a notebook model if content is requested, the notebook content will be populated as a JSON structure (not double-serialized) + + if require_hash is 
true, the model will include 'hash' """ model = self._base_model(path) model["type"] = "notebook" os_path = self._get_os_path(path) + bytes_content = None if content: - validation_error = {} - nb = self._read_notebook( - os_path, as_version=4, capture_validation_error=validation_error + validation_error: dict[str, t.Any] = {} + nb, bytes_content = self._read_notebook( + os_path, as_version=4, capture_validation_error=validation_error, raw=True ) self.mark_trusted_cells(nb, path) model["content"] = nb model["format"] = "json" self.validate_notebook_model(model, validation_error) + if require_hash: + if bytes_content is None: + bytes_content, _ = self._read_file(os_path, "byte") # type: ignore[misc] + model.update(**self._get_hash(bytes_content)) # type: ignore[arg-type] + return model - def get(self, path, content=True, type=None, format=None): + def get(self, path, content=True, type=None, format=None, require_hash=False): """Takes a path for an entity and returns its model Parameters @@ -357,6 +422,8 @@ def get(self, path, content=True, type=None, format=None): format : str, optional The requested format for file contents. 'text' or 'base64'. Ignored if this returns a notebook or directory model. + require_hash: bool, optional + Whether to include the hash of the file contents. Returns ------- @@ -365,11 +432,16 @@ def get(self, path, content=True, type=None, format=None): of the file or directory as well. 
""" path = path.strip("/") + os_path = self._get_os_path(path) + four_o_four = "file or directory does not exist: %r" % path if not self.exists(path): - raise web.HTTPError(404, "No such file or directory: %s" % path) + raise web.HTTPError(404, four_o_four) + + if not self.allow_hidden and is_hidden(os_path, self.root_dir): + self.log.info("Refusing to serve hidden file or directory %r, via 404 Error", os_path) + raise web.HTTPError(404, four_o_four) - os_path = self._get_os_path(path) if os.path.isdir(os_path): if type not in (None, "directory"): raise web.HTTPError( @@ -379,17 +451,20 @@ def get(self, path, content=True, type=None, format=None): ) model = self._dir_model(path, content=content) elif type == "notebook" or (type is None and path.endswith(".ipynb")): - model = self._notebook_model(path, content=content) + model = self._notebook_model(path, content=content, require_hash=require_hash) else: if type == "directory": raise web.HTTPError(400, "%s is not a directory" % path, reason="bad type") - model = self._file_model(path, content=content, format=format) + model = self._file_model( + path, content=content, format=format, require_hash=require_hash + ) + self.emit(data={"action": "get", "path": path}) return model def _save_directory(self, os_path, model, path=""): """create a directory""" - if is_hidden(os_path, self.root_dir) and not self.allow_hidden: - raise web.HTTPError(400, "Cannot create hidden directory %r" % os_path) + if not self.allow_hidden and is_hidden(os_path, self.root_dir): + raise web.HTTPError(400, "Cannot create directory %r" % os_path) if not os.path.exists(os_path): with self.perm_to_403(): os.mkdir(os_path) @@ -408,11 +483,14 @@ def save(self, model, path=""): raise web.HTTPError(400, "No file type provided") if "content" not in model and model["type"] != "directory": raise web.HTTPError(400, "No file content provided") - os_path = self._get_os_path(path) + + if not self.allow_hidden and is_hidden(os_path, self.root_dir): + raise 
web.HTTPError(400, f"Cannot create file or directory {os_path!r}") + self.log.debug("Saving %s", os_path) - validation_error = {} + validation_error: dict[str, t.Any] = {} try: if model["type"] == "notebook": nb = nbformat.from_dict(model["content"]) @@ -444,7 +522,7 @@ def save(self, model, path=""): model["message"] = validation_message self.run_post_save_hooks(model=model, os_path=os_path) - + self.emit(data={"action": "save", "path": path}) return model def delete_file(self, path): @@ -452,19 +530,13 @@ def delete_file(self, path): path = path.strip("/") os_path = self._get_os_path(path) rm = os.unlink - if not os.path.exists(os_path): - raise web.HTTPError(404, "File or directory does not exist: %s" % os_path) - def _check_trash(os_path): - if sys.platform in {"win32", "darwin"}: - return True + if not self.allow_hidden and is_hidden(os_path, self.root_dir): + raise web.HTTPError(400, f"Cannot delete file or directory {os_path!r}") - # It's a bit more nuanced than this, but until we can better - # distinguish errors from send2trash, assume that we can only trash - # files on the same partition as the home directory. - file_dev = os.stat(os_path).st_dev - home_dev = os.stat(os.path.expanduser("~")).st_dev - return file_dev == home_dev + four_o_four = "file or directory does not exist: %r" % path + if not self.exists(path): + raise web.HTTPError(404, four_o_four) def is_non_empty_dir(os_path): if os.path.isdir(os_path): @@ -481,20 +553,15 @@ def is_non_empty_dir(os_path): # send2trash can really delete files on Windows, so disallow # deleting non-empty files. See Github issue 3631. raise web.HTTPError(400, "Directory %s not empty" % os_path) - if _check_trash(os_path): - # Looking at the code in send2trash, I don't think the errors it - # raises let us distinguish permission errors from other errors in - # code. So for now, the "look before you leap" approach is used. 
- if not self.is_writable(path): - raise web.HTTPError(403, "Permission denied: %s" % path) - self.log.debug("Sending %s to trash", os_path) + # send2trash now supports deleting directories. see #1290 + if not self.is_writable(path): + raise web.HTTPError(403, "Permission denied: %s" % path) from None + self.log.debug("Sending %s to trash", os_path) + try: send2trash(os_path) - return - else: - self.log.warning( - "Skipping trash for %s, on different device to home directory", - os_path, - ) + except OSError as e: + raise web.HTTPError(400, "send2trash failed: %s" % e) from e + return if os.path.isdir(os_path): # Don't permanently delete non-empty directories. @@ -518,6 +585,11 @@ def rename_file(self, old_path, new_path): new_os_path = self._get_os_path(new_path) old_os_path = self._get_os_path(old_path) + if not self.allow_hidden and ( + is_hidden(old_os_path, self.root_dir) or is_hidden(new_os_path, self.root_dir) + ): + raise web.HTTPError(400, f"Cannot rename file or directory {old_os_path!r}") + # Should we proceed with the move? if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): raise web.HTTPError(409, "File already exists: %s" % new_path) @@ -532,20 +604,141 @@ def rename_file(self, old_path, new_path): raise web.HTTPError(500, f"Unknown error renaming file: {old_path} {e}") from e def info_string(self): + """Get the information string for the manager.""" return _i18n("Serving notebooks from local directory: %s") % self.root_dir def get_kernel_path(self, path, model=None): """Return the initial API path of a kernel associated with a given notebook""" if self.dir_exists(path): return path + parent_dir = path.rsplit("/", 1)[0] if "/" in path else "" + return parent_dir + + def copy(self, from_path, to_path=None): + """ + Copy an existing file or directory and return its new model. + If to_path not specified, it will be the parent directory of from_path. 
+ If copying a file and to_path is a directory, filename/directoryname will increment `from_path-Copy#.ext`. + Considering multi-part extensions, the Copy# part will be placed before the first dot for all the extensions except `ipynb`. + For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot. + from_path must be a full path to a file or directory. + """ + to_path_original = str(to_path) + path = from_path.strip("/") + if to_path is not None: + to_path = to_path.strip("/") + if "/" in path: - parent_dir = path.rsplit("/", 1)[0] + from_dir, from_name = path.rsplit("/", 1) else: - parent_dir = "" - return parent_dir + from_dir = "" + from_name = path + + model = self.get(path) + # limit the size of folders being copied to prevent a timeout error + if model["type"] == "directory": + self.check_folder_size(path) + else: + # let the super class handle copying files + return super().copy(from_path=from_path, to_path=to_path) + + is_destination_specified = to_path is not None + to_name = copy_pat.sub(".", from_name) + if not is_destination_specified: + to_path = from_dir + if self.dir_exists(to_path): + name = copy_pat.sub(".", from_name) + to_name = super().increment_filename(name, to_path, insert="-Copy") + to_path = f"{to_path}/{to_name}" + + return self._copy_dir( + from_path=from_path, + to_path_original=to_path_original, + to_name=to_name, + to_path=to_path, + ) + + def _copy_dir(self, from_path, to_path_original, to_name, to_path): + """ + handles copying directories + returns the model for the copied directory + """ + try: + os_from_path = self._get_os_path(from_path.strip("/")) + os_to_path = f'{self._get_os_path(to_path_original.strip("/"))}/{to_name}' + shutil.copytree(os_from_path, os_to_path) + model = self.get(to_path, content=False) + except OSError as err: + self.log.error(f"OSError in _copy_dir: {err}") + raise web.HTTPError( + 400, + f"Can't copy '{from_path}' into Folder '{to_path}'", + ) from err + + return 
model + + def check_folder_size(self, path): + """ + limit the size of folders being copied to be no more than the + trait max_copy_folder_size_mb to prevent a timeout error + """ + limit_bytes = self.max_copy_folder_size_mb * 1024 * 1024 + size = int(self._get_dir_size(self._get_os_path(path))) + # convert from KB to Bytes for macOS + size = size * 1024 if platform.system() == "Darwin" else size + + if size > limit_bytes: + raise web.HTTPError( + 400, + f""" + Can't copy folders larger than {self.max_copy_folder_size_mb}MB, + "{path}" is {self._human_readable_size(size)} + """, + ) + + def _get_dir_size(self, path="."): + """ + calls the command line program du to get the directory size + """ + try: + if platform.system() == "Darwin": + # returns the size of the folder in KB + result = subprocess.run( + ["du", "-sk", path], capture_output=True, check=True + ).stdout.split() + else: + result = subprocess.run( + ["du", "-s", "--block-size=1", path], + capture_output=True, + check=True, + ).stdout.split() + + self.log.info(f"current status of du command {result}") + size = result[0].decode("utf-8") + except Exception: + self.log.warning( + "Not able to get the size of the %s directory. 
Copying might be slow if the directory is large!", + path, + ) + return "0" + return size + + def _human_readable_size(self, size): + """ + returns folder size in a human readable format + """ + if size == 0: + return "0 Bytes" + + units = ["Bytes", "KB", "MB", "GB", "TB", "PB"] + order = int(math.log2(size) / 10) if size else 0 + + return f"{size / (1 << (order * 10)):.4g} {units[order]}" class AsyncFileContentsManager(FileContentsManager, AsyncFileManagerMixin, AsyncContentsManager): + """An async file contents manager.""" + @default("checkpoints_class") def _checkpoints_class_default(self): return AsyncFileCheckpoints @@ -561,7 +754,7 @@ async def _dir_model(self, path, content=True): if not os.path.isdir(os_path): raise web.HTTPError(404, four_o_four) - elif is_hidden(os_path, self.root_dir) and not self.allow_hidden: + elif not self.allow_hidden and is_hidden(os_path, self.root_dir): self.log.info("Refusing to serve hidden directory %r, via 404 Error", os_path) raise web.HTTPError(404, four_o_four) @@ -576,7 +769,7 @@ async def _dir_model(self, path, content=True): try: os_path = os.path.join(os_dir, name) except UnicodeDecodeError as e: - self.log.warning("failed to decode filename '%s': %s", name, e) + self.log.warning("failed to decode filename '%s': %r", name, e) continue try: @@ -586,7 +779,7 @@ async def _dir_model(self, path, content=True): if e.errno == errno.ENOENT: self.log.warning("%s doesn't exist", os_path) elif e.errno != errno.EACCES: # Don't provide clues about protected files - self.log.warning("Error stat-ing %s: %s", os_path, e) + self.log.warning("Error stat-ing %s: %r", os_path, e) continue if ( @@ -598,9 +791,10 @@ async def _dir_model(self, path, content=True): continue try: - if self.should_list(name): - if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): - contents.append(await self.get(path=f"{path}/{name}", content=False)) + if self.should_list(name) and ( + self.allow_hidden or not is_file_hidden(os_path, stat_res=st) 
+ ): + contents.append(await self.get(path=f"{path}/{name}", content=False)) except OSError as e: # ELOOP: recursive symlink, also don't show failure due to permissions if e.errno not in [errno.ELOOP, errno.EACCES]: @@ -614,7 +808,7 @@ async def _dir_model(self, path, content=True): return model - async def _file_model(self, path, content=True, format=None): + async def _file_model(self, path, content=True, format=None, require_hash=False): """Build a model for a file if content is requested, include the file contents. @@ -623,6 +817,8 @@ async def _file_model(self, path, content=True, format=None): If 'text', the contents will be decoded as UTF-8. If 'base64', the raw bytes contents will be encoded as base64. If not specified, try to decode as UTF-8, and fall back to base64 + + if require_hash is true, the model will include 'hash' """ model = self._base_model(path) model["type"] = "file" @@ -630,8 +826,9 @@ async def _file_model(self, path, content=True, format=None): os_path = self._get_os_path(path) model["mimetype"] = mimetypes.guess_type(os_path)[0] + bytes_content = None if content: - content, format = await self._read_file(os_path, format) + content, format, bytes_content = await self._read_file(os_path, format, raw=True) # type: ignore[misc] if model["mimetype"] is None: default_mime = { "text": "text/plain", @@ -644,9 +841,14 @@ async def _file_model(self, path, content=True, format=None): format=format, ) + if require_hash: + if bytes_content is None: + bytes_content, _ = await self._read_file(os_path, "byte") # type: ignore[assignment,misc] + model.update(**self._get_hash(bytes_content)) # type: ignore[arg-type] + return model - async def _notebook_model(self, path, content=True): + async def _notebook_model(self, path, content=True, require_hash=False): """Build a notebook model if content is requested, the notebook content will be populated @@ -656,19 +858,25 @@ async def _notebook_model(self, path, content=True): model["type"] = "notebook" os_path = 
self._get_os_path(path) + bytes_content = None if content: - validation_error = {} - nb = await self._read_notebook( - os_path, as_version=4, capture_validation_error=validation_error + validation_error: dict[str, t.Any] = {} + nb, bytes_content = await self._read_notebook( + os_path, as_version=4, capture_validation_error=validation_error, raw=True ) self.mark_trusted_cells(nb, path) model["content"] = nb model["format"] = "json" self.validate_notebook_model(model, validation_error) + if require_hash: + if bytes_content is None: + bytes_content, _ = await self._read_file(os_path, "byte") # type: ignore[misc] + model.update(**(self._get_hash(bytes_content))) # type: ignore[arg-type] + return model - async def get(self, path, content=True, type=None, format=None): + async def get(self, path, content=True, type=None, format=None, require_hash=False): """Takes a path for an entity and returns its model Parameters @@ -683,6 +891,8 @@ async def get(self, path, content=True, type=None, format=None): format : str, optional The requested format for file contents. 'text' or 'base64'. Ignored if this returns a notebook or directory model. + require_hash: bool, optional + Whether to include the hash of the file contents. 
Returns ------- @@ -705,16 +915,19 @@ async def get(self, path, content=True, type=None, format=None): ) model = await self._dir_model(path, content=content) elif type == "notebook" or (type is None and path.endswith(".ipynb")): - model = await self._notebook_model(path, content=content) + model = await self._notebook_model(path, content=content, require_hash=require_hash) else: if type == "directory": raise web.HTTPError(400, "%s is not a directory" % path, reason="bad type") - model = await self._file_model(path, content=content, format=format) + model = await self._file_model( + path, content=content, format=format, require_hash=require_hash + ) + self.emit(data={"action": "get", "path": path}) return model async def _save_directory(self, os_path, model, path=""): """create a directory""" - if is_hidden(os_path, self.root_dir) and not self.allow_hidden: + if not self.allow_hidden and is_hidden(os_path, self.root_dir): raise web.HTTPError(400, "Cannot create hidden directory %r" % os_path) if not os.path.exists(os_path): with self.perm_to_403(): @@ -728,7 +941,7 @@ async def save(self, model, path=""): """Save the file model and return the model with no content.""" path = path.strip("/") - self.run_pre_save_hook(model=model, path=path) + self.run_pre_save_hooks(model=model, path=path) if "type" not in model: raise web.HTTPError(400, "No file type provided") @@ -738,7 +951,7 @@ async def save(self, model, path=""): os_path = self._get_os_path(path) self.log.debug("Saving %s", os_path) - validation_error = {} + validation_error: dict[str, t.Any] = {} try: if model["type"] == "notebook": nb = nbformat.from_dict(model["content"]) @@ -770,7 +983,7 @@ async def save(self, model, path=""): model["message"] = validation_message self.run_post_save_hooks(model=model, os_path=os_path) - + self.emit(data={"action": "save", "path": path}) return model async def delete_file(self, path): @@ -778,19 +991,12 @@ async def delete_file(self, path): path = path.strip("/") os_path = 
self._get_os_path(path) rm = os.unlink - if not os.path.exists(os_path): - raise web.HTTPError(404, "File or directory does not exist: %s" % os_path) - async def _check_trash(os_path): - if sys.platform in {"win32", "darwin"}: - return True + if not self.allow_hidden and is_hidden(os_path, self.root_dir): + raise web.HTTPError(400, f"Cannot delete file or directory {os_path!r}") - # It's a bit more nuanced than this, but until we can better - # distinguish errors from send2trash, assume that we can only trash - # files on the same partition as the home directory. - file_dev = (await run_sync(os.stat, os_path)).st_dev - home_dev = (await run_sync(os.stat, os.path.expanduser("~"))).st_dev - return file_dev == home_dev + if not os.path.exists(os_path): + raise web.HTTPError(404, "File or directory does not exist: %s" % os_path) async def is_non_empty_dir(os_path): if os.path.isdir(os_path): @@ -812,20 +1018,15 @@ async def is_non_empty_dir(os_path): # send2trash can really delete files on Windows, so disallow # deleting non-empty files. See Github issue 3631. raise web.HTTPError(400, "Directory %s not empty" % os_path) - if await _check_trash(os_path): - # Looking at the code in send2trash, I don't think the errors it - # raises let us distinguish permission errors from other errors in - # code. So for now, the "look before you leap" approach is used. - if not self.is_writable(path): - raise web.HTTPError(403, "Permission denied: %s" % path) - self.log.debug("Sending %s to trash", os_path) + # send2trash now supports deleting directories. 
see #1290 + if not self.is_writable(path): + raise web.HTTPError(403, "Permission denied: %s" % path) from None + self.log.debug("Sending %s to trash", os_path) + try: send2trash(os_path) - return - else: - self.log.warning( - "Skipping trash for %s, on different device to home directory", - os_path, - ) + except OSError as e: + raise web.HTTPError(400, "send2trash failed: %s" % e) from e + return if os.path.isdir(os_path): # Don't permanently delete non-empty directories. @@ -849,6 +1050,11 @@ async def rename_file(self, old_path, new_path): new_os_path = self._get_os_path(new_path) old_os_path = self._get_os_path(old_path) + if not self.allow_hidden and ( + is_hidden(old_os_path, self.root_dir) or is_hidden(new_os_path, self.root_dir) + ): + raise web.HTTPError(400, f"Cannot rename file or directory {old_os_path!r}") + # Should we proceed with the move? if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): raise web.HTTPError(409, "File already exists: %s" % new_path) @@ -879,3 +1085,133 @@ async def is_hidden(self, path): path = path.strip("/") os_path = self._get_os_path(path=path) return is_hidden(os_path, self.root_dir) + + async def get_kernel_path(self, path, model=None): + """Return the initial API path of a kernel associated with a given notebook""" + if await self.dir_exists(path): + return path + parent_dir = path.rsplit("/", 1)[0] if "/" in path else "" + return parent_dir + + async def copy(self, from_path, to_path=None): + """ + Copy an existing file or directory and return its new model. + If to_path not specified, it will be the parent directory of from_path. + If copying a file and to_path is a directory, filename/directoryname will increment `from_path-Copy#.ext`. + Considering multi-part extensions, the Copy# part will be placed before the first dot for all the extensions except `ipynb`. + For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot. 
+ from_path must be a full path to a file or directory. + """ + to_path_original = str(to_path) + path = from_path.strip("/") + if to_path is not None: + to_path = to_path.strip("/") + + if "/" in path: + from_dir, from_name = path.rsplit("/", 1) + else: + from_dir = "" + from_name = path + + model = await self.get(path) + # limit the size of folders being copied to prevent a timeout error + if model["type"] == "directory": + await self.check_folder_size(path) + else: + # let the super class handle copying files + return await AsyncContentsManager.copy(self, from_path=from_path, to_path=to_path) + + is_destination_specified = to_path is not None + to_name = copy_pat.sub(".", from_name) + if not is_destination_specified: + to_path = from_dir + if await self.dir_exists(to_path): + name = copy_pat.sub(".", from_name) + to_name = await super().increment_filename(name, to_path, insert="-Copy") + to_path = f"{to_path}/{to_name}" + + return await self._copy_dir( + from_path=from_path, + to_path_original=to_path_original, + to_name=to_name, + to_path=to_path, + ) + + async def _copy_dir( + self, from_path: str, to_path_original: str, to_name: str, to_path: str + ) -> dict[str, t.Any]: + """ + handles copying directories + returns the model for the copied directory + """ + try: + os_from_path = self._get_os_path(from_path.strip("/")) + os_to_path = f'{self._get_os_path(to_path_original.strip("/"))}/{to_name}' + shutil.copytree(os_from_path, os_to_path) + model = await self.get(to_path, content=False) + except OSError as err: + self.log.error(f"OSError in _copy_dir: {err}") + raise web.HTTPError( + 400, + f"Can't copy '{from_path}' into read-only Folder '{to_path}'", + ) from err + + return model # type:ignore[no-any-return] + + async def check_folder_size(self, path: str) -> None: + """ + limit the size of folders being copied to be no more than the + trait max_copy_folder_size_mb to prevent a timeout error + """ + limit_bytes = self.max_copy_folder_size_mb * 1024 * 1024 + 
+ size = int(await self._get_dir_size(self._get_os_path(path))) + # convert from KB to Bytes for macOS + size = size * 1024 if platform.system() == "Darwin" else size + if size > limit_bytes: + raise web.HTTPError( + 400, + f""" + Can't copy folders larger than {self.max_copy_folder_size_mb}MB, + "{path}" is {await self._human_readable_size(size)} + """, + ) + + async def _get_dir_size(self, path: str = ".") -> str: + """ + calls the command line program du to get the directory size + """ + try: + if platform.system() == "Darwin": + # returns the size of the folder in KB + result = subprocess.run( + ["du", "-sk", path], capture_output=True, check=True + ).stdout.split() + else: + result = subprocess.run( + ["du", "-s", "--block-size=1", path], + capture_output=True, + check=True, + ).stdout.split() + + self.log.info(f"current status of du command {result}") + size = result[0].decode("utf-8") + except Exception: + self.log.warning( + "Not able to get the size of the %s directory. Copying might be slow if the directory is large!", + path, + ) + return "0" + return size + + async def _human_readable_size(self, size: int) -> str: + """ + returns folder size in a human readable format + """ + if size == 0: + return "0 Bytes" + + units = ["Bytes", "KB", "MB", "GB", "TB", "PB"] + order = int(math.log2(size) / 10) if size else 0 + + return f"{size / (1 << (order * 10)):.4g} {units[order]}" diff --git a/jupyter_server/services/contents/handlers.py b/jupyter_server/services/contents/handlers.py index 59c109ad84..a7c7ffff17 100644 --- a/jupyter_server/services/contents/handlers.py +++ b/jupyter_server/services/contents/handlers.py @@ -5,27 +5,53 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import json +from http import HTTPStatus +from typing import Any, Dict, List try: from jupyter_client.jsonutil import json_default except ImportError: from jupyter_client.jsonutil import date_default as json_default +from jupyter_core.utils import ensure_async from tornado import web -from jupyter_server.auth import authorized +from jupyter_server.auth.decorator import authorized from jupyter_server.base.handlers import APIHandler, JupyterHandler, path_regex -from jupyter_server.utils import ensure_async, url_escape, url_path_join +from jupyter_server.utils import url_escape, url_path_join AUTH_RESOURCE = "contents" -def validate_model(model, expect_content): +def _validate_keys(expect_defined: bool, model: Dict[str, Any], keys: List[str]): + """ + Validate that the keys are defined (i.e. not None) or not (i.e. None) + """ + + if expect_defined: + errors = [key for key in keys if model[key] is None] + if errors: + raise web.HTTPError( + 500, + f"Keys unexpectedly None: {errors}", + ) + else: + errors = {key: model[key] for key in keys if model[key] is not None} # type: ignore[assignment] + if errors: + raise web.HTTPError( + 500, + f"Keys unexpectedly not None: {errors}", + ) + + +def validate_model(model, expect_content=False, expect_hash=False): """ Validate a model returned by a ContentsManager method. If expect_content is True, then we expect non-null entries for 'content' and 'format'. + + If expect_hash is True, then we expect non-null entries for 'hash' and 'hash_algorithm'. 
""" required_keys = { "name", @@ -38,6 +64,8 @@ def validate_model(model, expect_content): "content", "format", } + if expect_hash: + required_keys.update(["hash", "hash_algorithm"]) missing = required_keys - set(model.keys()) if missing: raise web.HTTPError( @@ -45,28 +73,21 @@ def validate_model(model, expect_content): f"Missing Model Keys: {missing}", ) - maybe_none_keys = ["content", "format"] - if expect_content: - errors = [key for key in maybe_none_keys if model[key] is None] - if errors: - raise web.HTTPError( - 500, - f"Keys unexpectedly None: {errors}", - ) - else: - errors = {key: model[key] for key in maybe_none_keys if model[key] is not None} - if errors: - raise web.HTTPError( - 500, - f"Keys unexpectedly not None: {errors}", - ) + content_keys = ["content", "format"] + _validate_keys(expect_content, model, content_keys) + if expect_hash: + _validate_keys(expect_hash, model, ["hash", "hash_algorithm"]) class ContentsAPIHandler(APIHandler): + """A contents API handler.""" + auth_resource = AUTH_RESOURCE class ContentsHandler(ContentsAPIHandler): + """A contents handler.""" + def location_url(self, path): """Return the full URL location of a file. @@ -86,6 +107,12 @@ def _finish_model(self, model, location=True): self.set_header("Content-Type", "application/json") self.finish(json.dumps(model, default=json_default)) + async def _finish_error(self, code, message): + """Finish a JSON request with an error code and descriptive message""" + self.set_status(code) + self.write(message) + await self.finish() + @web.authenticated @authorized async def get(self, path=""): @@ -95,28 +122,64 @@ async def get(self, path=""): of the files and directories it contains. 
""" path = path or "" + cm = self.contents_manager + type = self.get_query_argument("type", default=None) if type not in {None, "directory", "file", "notebook"}: - raise web.HTTPError(400, "Type %r is invalid" % type) + # fall back to file if unknown type + type = "file" format = self.get_query_argument("format", default=None) if format not in {None, "text", "base64"}: raise web.HTTPError(400, "Format %r is invalid" % format) - content = self.get_query_argument("content", default="1") - if content not in {"0", "1"}: - raise web.HTTPError(400, "Content %r is invalid" % content) - content = int(content) - - model = await ensure_async( - self.contents_manager.get( - path=path, - type=type, - format=format, - content=content, + content_str = self.get_query_argument("content", default="1") + if content_str not in {"0", "1"}: + raise web.HTTPError(400, "Content %r is invalid" % content_str) + content = int(content_str or "") + + hash_str = self.get_query_argument("hash", default="0") + if hash_str not in {"0", "1"}: + raise web.HTTPError(400, f"Content {hash_str!r} is invalid") + require_hash = int(hash_str) + + if not cm.allow_hidden and await ensure_async(cm.is_hidden(path)): + await self._finish_error( + HTTPStatus.NOT_FOUND, f"file or directory {path!r} does not exist" ) - ) - validate_model(model, expect_content=content) - self._finish_model(model, location=False) + + try: + expect_hash = require_hash + try: + model = await ensure_async( + self.contents_manager.get( + path=path, + type=type, + format=format, + content=content, + require_hash=require_hash, + ) + ) + except TypeError: + # Fallback for ContentsManager not handling the require_hash argument + # introduced in 2.11 + expect_hash = False + model = await ensure_async( + self.contents_manager.get( + path=path, + type=type, + format=format, + content=content, + ) + ) + validate_model(model, expect_content=content, expect_hash=expect_hash) + self._finish_model(model, location=False) + except web.HTTPError as 
exc: + # 404 is okay in this context, catch exception and return 404 code to prevent stack trace on client + if exc.status_code == HTTPStatus.NOT_FOUND: + await self._finish_error( + HTTPStatus.NOT_FOUND, f"file or directory {path!r} does not exist" + ) + raise @web.authenticated @authorized @@ -126,8 +189,19 @@ async def patch(self, path=""): model = self.get_json_body() if model is None: raise web.HTTPError(400, "JSON body missing") + + old_path = model.get("path") + if ( + old_path + and not cm.allow_hidden + and ( + await ensure_async(cm.is_hidden(path)) or await ensure_async(cm.is_hidden(old_path)) + ) + ): + raise web.HTTPError(400, f"Cannot rename file or directory {path!r}") + model = await ensure_async(cm.update(model, path)) - validate_model(model, expect_content=False) + validate_model(model) self._finish_model(model) async def _copy(self, copy_from, copy_to=None): @@ -140,7 +214,7 @@ async def _copy(self, copy_from, copy_to=None): ) model = await ensure_async(self.contents_manager.copy(copy_from, copy_to)) self.set_status(201) - validate_model(model, expect_content=False) + validate_model(model) self._finish_model(model) async def _upload(self, model, path): @@ -148,7 +222,7 @@ async def _upload(self, model, path): self.log.info("Uploading file to %s", path) model = await ensure_async(self.contents_manager.new(model, path)) self.set_status(201) - validate_model(model, expect_content=False) + validate_model(model) self._finish_model(model) async def _new_untitled(self, path, type="", ext=""): @@ -158,7 +232,7 @@ async def _new_untitled(self, path, type="", ext=""): self.contents_manager.new_untitled(path=path, type=type, ext=ext) ) self.set_status(201) - validate_model(model, expect_content=False) + validate_model(model) self._finish_model(model) async def _save(self, model, path): @@ -167,7 +241,7 @@ async def _save(self, model, path): if not chunk or chunk == -1: # Avoid tedious log information self.log.info("Saving file at %s", path) model = await 
ensure_async(self.contents_manager.save(model, path)) - validate_model(model, expect_content=False) + validate_model(model) self._finish_model(model) @web.authenticated @@ -191,14 +265,22 @@ async def post(self, path=""): raise web.HTTPError(400, "Cannot POST to files, use PUT instead.") model = self.get_json_body() - - if model is not None: + if model: copy_from = model.get("copy_from") - ext = model.get("ext", "") - type = model.get("type", "") if copy_from: - await self._copy(copy_from, path) + if not cm.allow_hidden and ( + await ensure_async(cm.is_hidden(path)) + or await ensure_async(cm.is_hidden(copy_from)) + ): + raise web.HTTPError(400, f"Cannot copy file or directory {path!r}") + else: + await self._copy(copy_from, path) else: + ext = model.get("ext", "") + type = model.get("type", "") + if type not in {None, "", "directory", "file", "notebook"}: + # fall back to file if unknown type + type = "file" await self._new_untitled(path, type=type, ext=ext) else: await self._new_untitled(path) @@ -217,10 +299,21 @@ async def put(self, path=""): create a new empty notebook. 
""" model = self.get_json_body() + cm = self.contents_manager + if model: if model.get("copy_from"): raise web.HTTPError(400, "Cannot copy with PUT, only POST") + if not cm.allow_hidden and ( + (model.get("path") and await ensure_async(cm.is_hidden(model.get("path")))) + or await ensure_async(cm.is_hidden(path)) + ): + raise web.HTTPError(400, f"Cannot create file or directory {path!r}") + exists = await ensure_async(self.contents_manager.file_exists(path)) + if model.get("type", "") not in {None, "", "directory", "file", "notebook"}: + # fall back to file if unknown type + model["type"] = "file" if exists: await self._save(model, path) else: @@ -233,6 +326,10 @@ async def put(self, path=""): async def delete(self, path=""): """delete a file in the given path""" cm = self.contents_manager + + if not cm.allow_hidden and await ensure_async(cm.is_hidden(path)): + raise web.HTTPError(400, f"Cannot delete file or directory {path!r}") + self.log.warning("delete %s", path) await ensure_async(cm.delete(path)) self.set_status(204) @@ -240,6 +337,8 @@ async def delete(self, path=""): class CheckpointsHandler(ContentsAPIHandler): + """A checkpoints API handler.""" + @web.authenticated @authorized async def get(self, path=""): @@ -269,6 +368,8 @@ async def post(self, path=""): class ModifyCheckpointsHandler(ContentsAPIHandler): + """A checkpoints modification handler.""" + @web.authenticated @authorized async def post(self, path, checkpoint_id): @@ -291,9 +392,16 @@ async def delete(self, path, checkpoint_id): class NotebooksRedirectHandler(JupyterHandler): """Redirect /api/notebooks to /api/contents""" - SUPPORTED_METHODS = ("GET", "PUT", "PATCH", "POST", "DELETE") + SUPPORTED_METHODS = ( + "GET", + "PUT", + "PATCH", + "POST", + "DELETE", + ) # type:ignore[assignment] def get(self, path): + """Handle a notebooks redirect.""" self.log.warning("/api/notebooks is deprecated, use /api/contents") self.redirect(url_path_join(self.base_url, "api/contents", url_escape(path))) @@ 
-303,9 +411,10 @@ def get(self, path): class TrustNotebooksHandler(JupyterHandler): """Handles trust/signing of notebooks""" - @web.authenticated + @web.authenticated # type:ignore[misc] @authorized(resource=AUTH_RESOURCE) async def post(self, path=""): + """Trust a notebook by path.""" cm = self.contents_manager await ensure_async(cm.trust_notebook(path)) self.set_status(201) diff --git a/jupyter_server/services/contents/largefilemanager.py b/jupyter_server/services/contents/largefilemanager.py index f2a6c072fd..bb66b57758 100644 --- a/jupyter_server/services/contents/largefilemanager.py +++ b/jupyter_server/services/contents/largefilemanager.py @@ -54,6 +54,7 @@ def save(self, model, path=""): # Last chunk if chunk == -1: self.run_post_save_hooks(model=model, os_path=os_path) + self.emit(data={"action": "save", "path": path}) return model else: return super().save(model, path) @@ -125,6 +126,8 @@ async def save(self, model, path=""): # Last chunk if chunk == -1: self.run_post_save_hooks(model=model, os_path=os_path) + + self.emit(data={"action": "save", "path": path}) return model else: return await super().save(model, path) diff --git a/jupyter_server/services/contents/manager.py b/jupyter_server/services/contents/manager.py index 7bd6450803..b12a2055ec 100644 --- a/jupyter_server/services/contents/manager.py +++ b/jupyter_server/services/contents/manager.py @@ -1,12 +1,18 @@ """A base class for contents managers.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
+from __future__ import annotations + import itertools import json +import os import re +import typing as t import warnings from fnmatch import fnmatch +from jupyter_core.utils import ensure_async, run_sync +from jupyter_events import EventLogger from nbformat import ValidationError, sign from nbformat import validate as validate_nb from nbformat.v4 import new_notebook @@ -25,8 +31,9 @@ ) from traitlets.config.configurable import LoggingConfigurable +from jupyter_server import DEFAULT_EVENTS_SCHEMA_PATH, JUPYTER_SERVER_EVENTS_URI from jupyter_server.transutils import _i18n -from jupyter_server.utils import ensure_async, import_item +from jupyter_server.utils import import_item from ...files.handlers import FilesHandler from .checkpoints import AsyncCheckpoints, Checkpoints @@ -53,12 +60,61 @@ class ContentsManager(LoggingConfigurable): """ + event_schema_id = JUPYTER_SERVER_EVENTS_URI + "/contents_service/v1" + event_logger = Instance(EventLogger).tag(config=True) + + @default("event_logger") + def _default_event_logger(self): + if self.parent and hasattr(self.parent, "event_logger"): + return self.parent.event_logger + else: + # If parent does not have an event logger, create one. + logger = EventLogger() + schema_path = DEFAULT_EVENTS_SCHEMA_PATH / "contents_service" / "v1.yaml" + logger.register_event_schema(schema_path) + return logger + + def emit(self, data): + """Emit event using the core event schema from Jupyter Server's Contents Manager.""" + self.event_logger.emit(schema_id=self.event_schema_id, data=data) + root_dir = Unicode("/", config=True) + preferred_dir = Unicode( + "", + config=True, + help=_i18n( + "Preferred starting directory to use for notebooks. 
This is an API path (`/` separated, relative to root dir)" + ), + ) + + @validate("preferred_dir") + def _validate_preferred_dir(self, proposal): + value = proposal["value"].strip("/") + try: + import inspect + + if inspect.iscoroutinefunction(self.dir_exists): + dir_exists = run_sync(self.dir_exists)(value) + else: + dir_exists = self.dir_exists(value) + except HTTPError as e: + raise TraitError(e.log_message) from e + if not dir_exists: + raise TraitError(_i18n("Preferred directory not found: %r") % value) + if self.parent: + try: + if value != self.parent.preferred_dir: + self.parent.preferred_dir = os.path.join(self.root_dir, *value.split("/")) + except TraitError: + pass + return value + allow_hidden = Bool(False, config=True, help="Allow access to hidden files") notary = Instance(sign.NotebookNotary) + @default("notary") def _notary_default(self): return sign.NotebookNotary(parent=self) @@ -124,7 +180,8 @@ def _validate_pre_save_hook(self, proposal): if isinstance(value, str): value = import_item(self.pre_save_hook) if not callable(value): - raise TraitError("pre_save_hook must be callable") + msg = "pre_save_hook must be callable" + raise TraitError(msg) if callable(self.pre_save_hook): warnings.warn( f"Overriding existing pre_save_hook ({self.pre_save_hook.__name__}) with a new one ({value.__name__}).", @@ -159,7 +216,8 @@ def _validate_post_save_hook(self, proposal): if isinstance(value, str): value = import_item(value) if not callable(value): - raise TraitError("post_save_hook must be callable") + msg = "post_save_hook must be callable" + raise TraitError(msg) if callable(self.post_save_hook): warnings.warn( f"Overriding existing post_save_hook ({self.post_save_hook.__name__}) with a new one ({value.__name__}).", @@ -198,25 +256,30 @@ def run_post_save_hook(self, model, os_path): try: self.log.debug("Running post-save hook on %s", os_path) self.post_save_hook(os_path=os_path, model=model, contents_manager=self) - except Exception as e: + except Exception: 
self.log.error("Post-save hook failed o-n %s", os_path, exc_info=True) - raise HTTPError(500, "Unexpected error while running post hook save: %s" % e) from e + msg = "fUnexpected error while running post hook save: {e}" + raise HTTPError(500, msg) from None - _pre_save_hooks = List() - _post_save_hooks = List() + _pre_save_hooks: List[t.Any] = List() + _post_save_hooks: List[t.Any] = List() def register_pre_save_hook(self, hook): + """Register a pre save hook.""" if isinstance(hook, str): hook = import_item(hook) if not callable(hook): - raise RuntimeError("hook must be callable") + msg = "hook must be callable" + raise RuntimeError(msg) self._pre_save_hooks.append(hook) def register_post_save_hook(self, hook): + """Register a post save hook.""" if isinstance(hook, str): hook = import_item(hook) if not callable(hook): - raise RuntimeError("hook must be callable") + msg = "hook must be callable" + raise RuntimeError(msg) self._post_save_hooks.append(hook) def run_pre_save_hooks(self, model, path, **kwargs): @@ -268,10 +331,10 @@ def _default_checkpoints(self): @default("checkpoints_kwargs") def _default_checkpoints_kwargs(self): - return dict( - parent=self, - log=self.log, - ) + return { + "parent": self, + "log": self.log, + } files_handler_class = Type( FilesHandler, @@ -321,7 +384,7 @@ def dir_exists(self, path): Parameters ---------- - path : string + path : str The path to check Returns @@ -336,7 +399,7 @@ def is_hidden(self, path): Parameters ---------- - path : string + path : str The path to check. This is an API path (`/` separated, relative to root dir). @@ -357,7 +420,7 @@ def file_exists(self, path=""): Parameters ---------- - path : string + path : str The API path of a file to check for. Returns @@ -365,7 +428,7 @@ def file_exists(self, path=""): exists : bool Whether the file exists. 
""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError def exists(self, path): """Does a file or directory exist at the given path? @@ -374,7 +437,7 @@ def exists(self, path): Parameters ---------- - path : string + path : str The API path of a file or directory to check for. Returns @@ -384,9 +447,17 @@ def exists(self, path): """ return self.file_exists(path) or self.dir_exists(path) - def get(self, path, content=True, type=None, format=None): - """Get a file or directory model.""" - raise NotImplementedError("must be implemented in a subclass") + def get(self, path, content=True, type=None, format=None, require_hash=False): + """Get a file or directory model. + + Parameters + ---------- + require_hash : bool + Whether the file hash must be returned or not. + + *Changed in version 2.11*: The *require_hash* parameter was added. + """ + raise NotImplementedError def save(self, model, path): """ @@ -396,17 +467,17 @@ def save(self, model, path): should call self.run_pre_save_hook(model=model, path=path) prior to writing any data. """ - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError def delete_file(self, path): """Delete the file or directory at path.""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError def rename_file(self, old_path, new_path): """Rename a file or directory.""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError - # ContentsManager API part 2: methods that have useable default + # ContentsManager API part 2: methods that have usable default # implementations, but can be overridden in subclasses. 
def delete(self, path): @@ -416,11 +487,13 @@ def delete(self, path): raise HTTPError(400, "Can't delete root") self.delete_file(path) self.checkpoints.delete_all_checkpoints(path) + self.emit(data={"action": "delete", "path": path}) def rename(self, old_path, new_path): """Rename a file and any checkpoints associated with that file.""" self.rename_file(old_path, new_path) self.checkpoints.rename_all_checkpoints(old_path, new_path) + self.emit(data={"action": "rename", "path": new_path, "source_path": old_path}) def update(self, model, path): """Update the file's path @@ -436,6 +509,7 @@ def update(self, model, path): return model def info_string(self): + """The information string for the manager.""" return "Serving contents" def get_kernel_path(self, path, model=None): @@ -476,13 +550,8 @@ def increment_filename(self, filename, path="", insert=""): suffix = dot + ext for i in itertools.count(): - if i: - insert_i = f"{insert}{i}" - else: - insert_i = "" - name = "{basename}{insert}{suffix}".format( - basename=basename, insert=insert_i, suffix=suffix - ) + insert_i = f"{insert}{i}" if i else "" + name = f"{basename}{insert_i}{suffix}" if not self.exists(f"{path}/{name}"): break return name @@ -616,9 +685,11 @@ def copy(self, from_path, to_path=None): raise HTTPError(404, "No such directory: %s" % to_path) model = self.save(model, to_path) + self.emit(data={"action": "copy", "path": to_path, "source_path": from_path}) return model def log_info(self): + """Log the information string for the manager.""" self.log.info(self.info_string()) def trust_notebook(self, path): @@ -626,7 +697,7 @@ def trust_notebook(self, path): Parameters ---------- - path : string + path : str The path of a notebook """ model = self.get(path) @@ -644,7 +715,7 @@ def check_and_sign(self, nb, path=""): ---------- nb : dict The notebook dict - path : string + path : str The notebook's path (for logging) """ if self.notary.check_cells(nb): @@ -661,7 +732,7 @@ def mark_trusted_cells(self, nb, 
path=""): ---------- nb : dict The notebook object (in current nbformat) - path : string + path : str The notebook's path (for logging) """ trusted = self.notary.check_signature(nb) @@ -704,10 +775,10 @@ def _default_checkpoints(self): @default("checkpoints_kwargs") def _default_checkpoints_kwargs(self): - return dict( - parent=self, - log=self.log, - ) + return { + "parent": self, + "log": self.log, + } # ContentsManager API part 1: methods that must be # implemented in subclasses. @@ -721,7 +792,7 @@ async def dir_exists(self, path): Parameters ---------- - path : string + path : str The path to check Returns @@ -736,7 +807,7 @@ async def is_hidden(self, path): Parameters ---------- - path : string + path : str The path to check. This is an API path (`/` separated, relative to root dir). @@ -757,7 +828,7 @@ async def file_exists(self, path=""): Parameters ---------- - path : string + path : str The API path of a file to check for. Returns @@ -765,7 +836,7 @@ async def file_exists(self, path=""): exists : bool Whether the file exists. """ - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def exists(self, path): """Does a file or directory exist at the given path? @@ -774,7 +845,7 @@ async def exists(self, path): Parameters ---------- - path : string + path : str The API path of a file or directory to check for. Returns @@ -786,9 +857,17 @@ async def exists(self, path): self.dir_exists(path) ) - async def get(self, path, content=True, type=None, format=None): - """Get a file or directory model.""" - raise NotImplementedError("must be implemented in a subclass") + async def get(self, path, content=True, type=None, format=None, require_hash=False): + """Get a file or directory model. + + Parameters + ---------- + require_hash : bool + Whether the file hash must be returned or not. + + *Changed in version 2.11*: The *require_hash* parameter was added. 
+ """ + raise NotImplementedError async def save(self, model, path): """ @@ -798,17 +877,17 @@ async def save(self, model, path): should call self.run_pre_save_hook(model=model, path=path) prior to writing any data. """ - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def delete_file(self, path): """Delete the file or directory at path.""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError async def rename_file(self, old_path, new_path): """Rename a file or directory.""" - raise NotImplementedError("must be implemented in a subclass") + raise NotImplementedError - # ContentsManager API part 2: methods that have useable default + # ContentsManager API part 2: methods that have usable default # implementations, but can be overridden in subclasses. async def delete(self, path): @@ -819,11 +898,13 @@ async def delete(self, path): await self.delete_file(path) await self.checkpoints.delete_all_checkpoints(path) + self.emit(data={"action": "delete", "path": path}) async def rename(self, old_path, new_path): """Rename a file and any checkpoints associated with that file.""" await self.rename_file(old_path, new_path) await self.checkpoints.rename_all_checkpoints(old_path, new_path) + self.emit(data={"action": "rename", "path": new_path, "source_path": old_path}) async def update(self, model, path): """Update the file's path @@ -864,13 +945,8 @@ async def increment_filename(self, filename, path="", insert=""): suffix = dot + ext for i in itertools.count(): - if i: - insert_i = f"{insert}{i}" - else: - insert_i = "" - name = "{basename}{insert}{suffix}".format( - basename=basename, insert=insert_i, suffix=suffix - ) + insert_i = f"{insert}{i}" if i else "" + name = f"{basename}{insert_i}{suffix}" file_exists = await ensure_async(self.exists(f"{path}/{name}")) if not file_exists: break @@ -985,6 +1061,7 @@ async def copy(self, from_path, to_path=None): raise HTTPError(404, "No such 
directory: %s" % to_path) model = await self.save(model, to_path) + self.emit(data={"action": "copy", "path": to_path, "source_path": from_path}) return model async def trust_notebook(self, path): @@ -992,7 +1069,7 @@ async def trust_notebook(self, path): Parameters ---------- - path : string + path : str The path of a notebook """ model = await self.get(path) @@ -1013,7 +1090,9 @@ async def restore_checkpoint(self, checkpoint_id, path): await self.checkpoints.restore_checkpoint(self, checkpoint_id, path) async def list_checkpoints(self, path): + """List the checkpoints for a path.""" return await self.checkpoints.list_checkpoints(path) async def delete_checkpoint(self, checkpoint_id, path): + """Delete a checkpoint for a path by id.""" return await self.checkpoints.delete_checkpoint(checkpoint_id, path) diff --git a/jupyter_server/services/events/__init__.py b/jupyter_server/services/events/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/jupyter_server/services/events/handlers.py b/jupyter_server/services/events/handlers.py new file mode 100644 index 0000000000..5c52e75ad8 --- /dev/null +++ b/jupyter_server/services/events/handlers.py @@ -0,0 +1,127 @@ +"""A Websocket Handler for emitting Jupyter server events. + +.. versionadded:: 2.0 +""" +from __future__ import annotations + +import json +from datetime import datetime +from typing import Any, Dict, Optional, cast + +import jupyter_events.logger +from tornado import web, websocket + +from jupyter_server.auth.decorator import authorized +from jupyter_server.base.handlers import JupyterHandler + +from ...base.handlers import APIHandler + +AUTH_RESOURCE = "events" + + +class SubscribeWebsocket( + JupyterHandler, + websocket.WebSocketHandler, +): + """Websocket handler for subscribing to events""" + + auth_resource = AUTH_RESOURCE + + def pre_get(self): + """Handles authentication/authorization when + attempting to subscribe to events emitted by + Jupyter Server's eventbus. 
+ """ + # authenticate the request before opening the websocket + user = self.current_user + if user is None: + self.log.warning("Couldn't authenticate WebSocket connection") + raise web.HTTPError(403) + + # authorize the user. + if not self.authorizer.is_authorized(self, user, "execute", "events"): + raise web.HTTPError(403) + + async def get(self, *args, **kwargs): + """Get an event socket.""" + self.pre_get() + res = super().get(*args, **kwargs) + if res is not None: + await res + + async def event_listener( + self, logger: jupyter_events.logger.EventLogger, schema_id: str, data: dict[str, Any] + ) -> None: + """Write an event message.""" + capsule = dict(schema_id=schema_id, **data) + self.write_message(json.dumps(capsule)) + + def open(self): + """Routes events that are emitted by Jupyter Server's + EventBus to a WebSocket client in the browser. + """ + self.event_logger.add_listener(listener=self.event_listener) + + def on_close(self): + """Handle a socket close.""" + self.event_logger.remove_listener(listener=self.event_listener) + + +def validate_model(data: dict[str, Any]) -> None: + """Validates for required fields in the JSON request body""" + required_keys = {"schema_id", "version", "data"} + for key in required_keys: + if key not in data: + raise web.HTTPError(400, f"Missing `{key}` in the JSON request body.") + + +def get_timestamp(data: dict[str, Any]) -> Optional[datetime]: + """Parses timestamp from the JSON request body""" + try: + if "timestamp" in data: + timestamp = datetime.strptime(data["timestamp"], "%Y-%m-%dT%H:%M:%S%zZ") + else: + timestamp = None + except Exception as e: + raise web.HTTPError( + 400, + """Failed to parse timestamp from JSON request body, + an ISO format datetime string with UTC offset is expected, + for example, 2022-05-26T13:50:00+05:00Z""", + ) from e + + return timestamp + + +class EventHandler(APIHandler): + """REST api handler for events""" + + auth_resource = AUTH_RESOURCE + + @web.authenticated + @authorized + 
async def post(self): + """Emit an event.""" + payload = self.get_json_body() + if payload is None: + raise web.HTTPError(400, "No JSON data provided") + + try: + validate_model(payload) + self.event_logger.emit( + schema_id=cast(str, payload.get("schema_id")), + data=cast("Dict[str, Any]", payload.get("data")), + timestamp_override=get_timestamp(payload), + ) + self.set_status(204) + self.finish() + except web.HTTPError: + raise + except Exception as e: + raise web.HTTPError(500, str(e)) from e + + +default_handlers = [ + (r"/api/events", EventHandler), + (r"/api/events/subscribe", SubscribeWebsocket), +] diff --git a/jupyter_server/services/kernels/connection/__init__.py b/jupyter_server/services/kernels/connection/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/jupyter_server/services/kernels/connection/abc.py b/jupyter_server/services/kernels/connection/abc.py new file mode 100644 index 0000000000..71f9e8254f --- /dev/null +++ b/jupyter_server/services/kernels/connection/abc.py @@ -0,0 +1,29 @@ +from abc import ABC, abstractmethod +from typing import Any, List + + +class KernelWebsocketConnectionABC(ABC): + """ + This class defines a minimal interface that should + be used to bridge the connection between Jupyter + Server's websocket API and a kernel's ZMQ socket + interface. 
+ """ + + websocket_handler: Any + + @abstractmethod + async def connect(self): + """Connect the kernel websocket to the kernel ZMQ connections""" + + @abstractmethod + async def disconnect(self): + """Disconnect the kernel websocket from the kernel ZMQ connections""" + + @abstractmethod + def handle_incoming_message(self, incoming_msg: str) -> None: + """Broker the incoming websocket message to the appropriate ZMQ channel.""" + + @abstractmethod + def handle_outgoing_message(self, stream: str, outgoing_msg: List[Any]) -> None: + """Broker outgoing ZMQ messages to the kernel websocket.""" diff --git a/jupyter_server/services/kernels/connection/base.py b/jupyter_server/services/kernels/connection/base.py new file mode 100644 index 0000000000..1f6b2fdcf4 --- /dev/null +++ b/jupyter_server/services/kernels/connection/base.py @@ -0,0 +1,180 @@ +"""Kernel connection helpers.""" +import json +import struct +from typing import Any, List + +from jupyter_client.session import Session +from tornado.websocket import WebSocketHandler +from traitlets import Float, Instance, Unicode, default +from traitlets.config import LoggingConfigurable + +try: + from jupyter_client.jsonutil import json_default +except ImportError: + from jupyter_client.jsonutil import date_default as json_default + +from jupyter_client.jsonutil import extract_dates + +from jupyter_server.transutils import _i18n + +from .abc import KernelWebsocketConnectionABC + + +def serialize_binary_message(msg): + """serialize a message as a binary blob + + Header: + + 4 bytes: number of msg parts (nbufs) as 32b int + 4 * nbufs bytes: offset for each buffer as integer as 32b int + + Offsets are from the start of the buffer, including the header. + + Returns + ------- + The message serialized to bytes. 
+ + """ + # don't modify msg or buffer list in-place + msg = msg.copy() + buffers = list(msg.pop("buffers")) + bmsg = json.dumps(msg, default=json_default).encode("utf8") + buffers.insert(0, bmsg) + nbufs = len(buffers) + offsets = [4 * (nbufs + 1)] + for buf in buffers[:-1]: + offsets.append(offsets[-1] + len(buf)) + offsets_buf = struct.pack("!" + "I" * (nbufs + 1), nbufs, *offsets) + buffers.insert(0, offsets_buf) + return b"".join(buffers) + + +def deserialize_binary_message(bmsg): + """deserialize a message from a binary blog + + Header: + + 4 bytes: number of msg parts (nbufs) as 32b int + 4 * nbufs bytes: offset for each buffer as integer as 32b int + + Offsets are from the start of the buffer, including the header. + + Returns + ------- + message dictionary + """ + nbufs = struct.unpack("!i", bmsg[:4])[0] + offsets = list(struct.unpack("!" + "I" * nbufs, bmsg[4 : 4 * (nbufs + 1)])) + offsets.append(None) + bufs = [] + for start, stop in zip(offsets[:-1], offsets[1:]): + bufs.append(bmsg[start:stop]) + msg = json.loads(bufs[0].decode("utf8")) + msg["header"] = extract_dates(msg["header"]) + msg["parent_header"] = extract_dates(msg["parent_header"]) + msg["buffers"] = bufs[1:] + return msg + + +def serialize_msg_to_ws_v1(msg_or_list, channel, pack=None): + """Serialize a message using the v1 protocol.""" + if pack: + msg_list = [ + pack(msg_or_list["header"]), + pack(msg_or_list["parent_header"]), + pack(msg_or_list["metadata"]), + pack(msg_or_list["content"]), + ] + else: + msg_list = msg_or_list + channel = channel.encode("utf-8") + offsets: List[Any] = [] + offsets.append(8 * (1 + 1 + len(msg_list) + 1)) + offsets.append(len(channel) + offsets[-1]) + for msg in msg_list: + offsets.append(len(msg) + offsets[-1]) + offset_number = len(offsets).to_bytes(8, byteorder="little") + offsets = [offset.to_bytes(8, byteorder="little") for offset in offsets] + bin_msg = b"".join([offset_number, *offsets, channel, *msg_list]) + return bin_msg + + +def 
deserialize_msg_from_ws_v1(ws_msg): + """Deserialize a message using the v1 protocol.""" + offset_number = int.from_bytes(ws_msg[:8], "little") + offsets = [ + int.from_bytes(ws_msg[8 * (i + 1) : 8 * (i + 2)], "little") for i in range(offset_number) + ] + channel = ws_msg[offsets[0] : offsets[1]].decode("utf-8") + msg_list = [ws_msg[offsets[i] : offsets[i + 1]] for i in range(1, offset_number - 1)] + return channel, msg_list + + +class BaseKernelWebsocketConnection(LoggingConfigurable): + """A configurable base class for connecting Kernel WebSockets to ZMQ sockets.""" + + kernel_ws_protocol = Unicode( + None, + allow_none=True, + config=True, + help=_i18n( + "Preferred kernel message protocol over websocket to use (default: None). " + "If an empty string is passed, select the legacy protocol. If None, " + "the selected protocol will depend on what the front-end supports " + "(usually the most recent protocol supported by the back-end and the " + "front-end)." + ), + ) + + @property + def kernel_manager(self): + """The kernel manager.""" + return self.parent + + @property + def multi_kernel_manager(self): + """The multi kernel manager.""" + return self.kernel_manager.parent + + @property + def kernel_id(self): + """The kernel id.""" + return self.kernel_manager.kernel_id + + @property + def session_id(self): + """The session id.""" + return self.session.session + + kernel_info_timeout = Float() + + @default("kernel_info_timeout") + def _default_kernel_info_timeout(self): + return self.multi_kernel_manager.kernel_info_timeout + + session = Instance(klass=Session, config=True) + + @default("session") + def _default_session(self): + return Session(config=self.config) + + websocket_handler = Instance(WebSocketHandler) + + async def connect(self): + """Handle a connect.""" + raise NotImplementedError() + + async def disconnect(self): + """Handle a disconnect.""" + raise NotImplementedError() + + def handle_incoming_message(self, incoming_msg: str) -> None: + """Handle an 
incoming message.""" + raise NotImplementedError() + + def handle_outgoing_message(self, stream: str, outgoing_msg: List[Any]) -> None: + """Handle an outgoing message.""" + raise NotImplementedError() + + +KernelWebsocketConnectionABC.register(BaseKernelWebsocketConnection) diff --git a/jupyter_server/services/kernels/connection/channels.py b/jupyter_server/services/kernels/connection/channels.py new file mode 100644 index 0000000000..05b9f6954e --- /dev/null +++ b/jupyter_server/services/kernels/connection/channels.py @@ -0,0 +1,808 @@ +"""An implementation of a kernel connection.""" +from __future__ import annotations + +import asyncio +import json +import time +import typing as t +import weakref +from concurrent.futures import Future +from textwrap import dedent + +from jupyter_client import protocol_version as client_protocol_version # type:ignore[attr-defined] +from tornado import gen, web +from tornado.ioloop import IOLoop +from tornado.websocket import WebSocketClosedError +from traitlets import Any, Bool, Dict, Float, Instance, Int, List, Unicode, default + +try: + from jupyter_client.jsonutil import json_default +except ImportError: + from jupyter_client.jsonutil import date_default as json_default + +from jupyter_core.utils import ensure_async + +from jupyter_server.transutils import _i18n + +from ..websocket import KernelWebsocketHandler +from .abc import KernelWebsocketConnectionABC +from .base import ( + BaseKernelWebsocketConnection, + deserialize_binary_message, + deserialize_msg_from_ws_v1, + serialize_binary_message, + serialize_msg_to_ws_v1, +) + + +def _ensure_future(f): + """Wrap a concurrent future as an asyncio future if there is a running loop.""" + try: + asyncio.get_running_loop() + return asyncio.wrap_future(f) + except RuntimeError: + return f + + +class ZMQChannelsWebsocketConnection(BaseKernelWebsocketConnection): + """A Jupyter Server Websocket Connection""" + + limit_rate = Bool( + True, + config=True, + help=_i18n( + "Whether to 
limit the rate of IOPub messages (default: True). " + "If True, use iopub_msg_rate_limit, iopub_data_rate_limit and/or rate_limit_window " + "to tune the rate." + ), + ) + + iopub_msg_rate_limit = Float( + 1000, + config=True, + help=_i18n( + """(msgs/sec) + Maximum rate at which messages can be sent on iopub before they are + limited.""" + ), + ) + + iopub_data_rate_limit = Float( + 1000000, + config=True, + help=_i18n( + """(bytes/sec) + Maximum rate at which stream output can be sent on iopub before they are + limited.""" + ), + ) + + rate_limit_window = Float( + 3, + config=True, + help=_i18n( + """(sec) Time window used to + check the message and data rate limits.""" + ), + ) + + websocket_handler = Instance(KernelWebsocketHandler) + + @property + def write_message(self): + """Alias to the websocket handler's write_message method.""" + return self.websocket_handler.write_message + + # class-level registry of open sessions + # allows checking for conflict on session-id, + # which is used as a zmq identity and must be unique. 
+ _open_sessions: dict[str, KernelWebsocketHandler] = {} + _open_sockets: t.MutableSet[ZMQChannelsWebsocketConnection] = weakref.WeakSet() + + _kernel_info_future: Future[t.Any] + _close_future: Future[t.Any] + + channels = Dict({}) + kernel_info_channel = Any(allow_none=True) + + _kernel_info_future = Instance(klass=Future) # type:ignore[assignment] + + @default("_kernel_info_future") + def _default_kernel_info_future(self): + """The default kernel info future.""" + return Future() + + _close_future = Instance(klass=Future) # type:ignore[assignment] + + @default("_close_future") + def _default_close_future(self): + """The default close future.""" + return Future() + + session_key = Unicode("") + + _iopub_window_msg_count = Int() + _iopub_window_byte_count = Int() + _iopub_msgs_exceeded = Bool(False) + _iopub_data_exceeded = Bool(False) + # Queue of (time stamp, byte count) + # Allows you to specify that the byte count should be lowered + # by a delta amount at some point in the future. 
+ _iopub_window_byte_queue: List[t.Any] = List([]) + + @classmethod + async def close_all(cls): + """Tornado does not provide a way to close open sockets, so add one.""" + for connection in list(cls._open_sockets): + connection.disconnect() + await _ensure_future(connection._close_future) + + @property + def subprotocol(self): + """The sub protocol.""" + try: + protocol = self.websocket_handler.selected_subprotocol + except Exception: + protocol = None + return protocol + + def create_stream(self): + """Create a stream.""" + identity = self.session.bsession + for channel in ("iopub", "shell", "control", "stdin"): + meth = getattr(self.kernel_manager, "connect_" + channel) + self.channels[channel] = stream = meth(identity=identity) + stream.channel = channel + + def nudge(self): + """Nudge the zmq connections with kernel_info_requests + Returns a Future that will resolve when we have received + a shell or control reply and at least one iopub message, + ensuring that zmq subscriptions are established, + sockets are fully connected, and kernel is responsive. + Keeps retrying kernel_info_request until these are both received. + """ + # Do not nudge busy kernels as kernel info requests sent to shell are + # queued behind execution requests. + # nudging in this case would cause a potentially very long wait + # before connections are opened, + # plus it is *very* unlikely that a busy kernel will not finish + # establishing its zmq subscriptions before processing the next request. + if getattr(self.kernel_manager, "execution_state", None) == "busy": + self.log.debug("Nudge: not nudging busy kernel %s", self.kernel_id) + f: Future[t.Any] = Future() + f.set_result(None) + return _ensure_future(f) + # Use a transient shell channel to prevent leaking + # shell responses to the front-end. + shell_channel = self.kernel_manager.connect_shell() + # Use a transient control channel to prevent leaking + # control responses to the front-end. 
+ control_channel = self.kernel_manager.connect_control() + # The IOPub used by the client, whose subscriptions we are verifying. + iopub_channel = self.channels["iopub"] + + info_future: Future[t.Any] = Future() + iopub_future: Future[t.Any] = Future() + both_done = gen.multi([info_future, iopub_future]) + + def finish(_=None): + """Ensure all futures are resolved + which in turn triggers cleanup + """ + for f in (info_future, iopub_future): + if not f.done(): + f.set_result(None) + + def cleanup(_=None): + """Common cleanup""" + loop.remove_timeout(nudge_handle) + iopub_channel.stop_on_recv() + if not shell_channel.closed(): + shell_channel.close() + if not control_channel.closed(): + control_channel.close() + + # trigger cleanup when both message futures are resolved + both_done.add_done_callback(cleanup) + + def on_shell_reply(msg): + """Handle nudge shell replies.""" + self.log.debug("Nudge: shell info reply received: %s", self.kernel_id) + if not info_future.done(): + self.log.debug("Nudge: resolving shell future: %s", self.kernel_id) + info_future.set_result(None) + + def on_control_reply(msg): + """Handle nudge control replies.""" + self.log.debug("Nudge: control info reply received: %s", self.kernel_id) + if not info_future.done(): + self.log.debug("Nudge: resolving control future: %s", self.kernel_id) + info_future.set_result(None) + + def on_iopub(msg): + """Handle nudge iopub replies.""" + self.log.debug("Nudge: IOPub received: %s", self.kernel_id) + if not iopub_future.done(): + iopub_channel.stop_on_recv() + self.log.debug("Nudge: resolving iopub future: %s", self.kernel_id) + iopub_future.set_result(None) + + iopub_channel.on_recv(on_iopub) + shell_channel.on_recv(on_shell_reply) + control_channel.on_recv(on_control_reply) + loop = IOLoop.current() + + # Nudge the kernel with kernel info requests until we get an IOPub message + def nudge(count): + """Nudge the kernel.""" + count += 1 + # check for stopped kernel + if self.kernel_id not in 
self.multi_kernel_manager: + self.log.debug("Nudge: cancelling on stopped kernel: %s", self.kernel_id) + finish() + return + + # check for closed zmq socket + if shell_channel.closed(): + self.log.debug("Nudge: cancelling on closed zmq socket: %s", self.kernel_id) + finish() + return + + # check for closed zmq socket + if control_channel.closed(): + self.log.debug("Nudge: cancelling on closed zmq socket: %s", self.kernel_id) + finish() + return + + if not both_done.done(): + log = self.log.warning if count % 10 == 0 else self.log.debug + log(f"Nudge: attempt {count} on kernel {self.kernel_id}") + self.session.send(shell_channel, "kernel_info_request") + self.session.send(control_channel, "kernel_info_request") + nonlocal nudge_handle # type: ignore[misc] + nudge_handle = loop.call_later(0.5, nudge, count) + + nudge_handle = loop.call_later(0, nudge, count=0) + + # resolve with a timeout if we get no response + future = gen.with_timeout(loop.time() + self.kernel_info_timeout, both_done) + # ensure we have no dangling resources or unresolved Futures in case of timeout + future.add_done_callback(finish) + return _ensure_future(future) + + async def _register_session(self): + """Ensure we aren't creating a duplicate session. + + If a previous identical session is still open, close it to avoid collisions. + This is likely due to a client reconnecting from a lost network connection, + where the socket on our side has not been cleaned up yet. 
+ """ + self.session_key = f"{self.kernel_id}:{self.session.session}" + stale_handler = self._open_sessions.get(self.session_key) + if stale_handler: + self.log.warning("Replacing stale connection: %s", self.session_key) + stale_handler.close() + if ( + self.kernel_id in self.multi_kernel_manager + ): # only update open sessions if kernel is actively managed + self._open_sessions[self.session_key] = t.cast( + KernelWebsocketHandler, self.websocket_handler + ) + + async def prepare(self): + """Prepare a kernel connection.""" + # check session collision: + await self._register_session() + # then request kernel info, waiting up to a certain time before giving up. + # We don't want to wait forever, because browsers don't take it well when + # servers never respond to websocket connection requests. + + if hasattr(self.kernel_manager, "ready"): + ready = self.kernel_manager.ready + if not isinstance(ready, asyncio.Future): + ready = asyncio.wrap_future(ready) + try: + await ready + except Exception as e: + self.kernel_manager.execution_state = "dead" + self.kernel_manager.reason = str(e) + raise web.HTTPError(500, str(e)) from e + + t0 = time.time() + while not await ensure_async(self.kernel_manager.is_alive()): + await asyncio.sleep(0.1) + if (time.time() - t0) > self.multi_kernel_manager.kernel_info_timeout: + msg = "Kernel never reached an 'alive' state." 
+ raise TimeoutError(msg) + + self.session.key = self.kernel_manager.session.key + future = self.request_kernel_info() + + def give_up(): + """Don't wait forever for the kernel to reply""" + if future.done(): + return + self.log.warning("Timeout waiting for kernel_info reply from %s", self.kernel_id) + future.set_result({}) + + loop = IOLoop.current() + loop.add_timeout(loop.time() + self.kernel_info_timeout, give_up) + # actually wait for it + await asyncio.wrap_future(future) + + def connect(self): + """Handle a connection.""" + self.multi_kernel_manager.notify_connect(self.kernel_id) + + # on new connections, flush the message buffer + buffer_info = self.multi_kernel_manager.get_buffer(self.kernel_id, self.session_key) + if buffer_info and buffer_info["session_key"] == self.session_key: + self.log.info("Restoring connection for %s", self.session_key) + if self.multi_kernel_manager.ports_changed(self.kernel_id): + # If the kernel's ports have changed (some restarts trigger this) + # then reset the channels so nudge() is using the correct iopub channel + self.create_stream() + else: + # The kernel's ports have not changed; use the channels captured in the buffer + self.channels = buffer_info["channels"] + + connected = self.nudge() + + def replay(value): + replay_buffer = buffer_info["buffer"] + if replay_buffer: + self.log.info("Replaying %s buffered messages", len(replay_buffer)) + for channel, msg_list in replay_buffer: + stream = self.channels[channel] + self.handle_outgoing_message(stream, msg_list) + + connected.add_done_callback(replay) + else: + try: + self.create_stream() + connected = self.nudge() + except web.HTTPError as e: + # Do not log error if the kernel is already shutdown, + # as it's normal that it's not responding + try: + self.multi_kernel_manager.get_kernel(self.kernel_id) + self.log.error("Error opening stream: %s", e) + except KeyError: + pass + # WebSockets don't respond to traditional error codes so we + # close the connection. 
+ for _, stream in self.channels.items(): + if not stream.closed(): + stream.close() + self.disconnect() + return None + + self.multi_kernel_manager.add_restart_callback(self.kernel_id, self.on_kernel_restarted) + self.multi_kernel_manager.add_restart_callback( + self.kernel_id, self.on_restart_failed, "dead" + ) + + def subscribe(value): + for _, stream in self.channels.items(): + stream.on_recv_stream(self.handle_outgoing_message) + + connected.add_done_callback(subscribe) + ZMQChannelsWebsocketConnection._open_sockets.add(self) + return connected + + def close(self): + """Close the connection.""" + return self.disconnect() + + def disconnect(self): + """Handle a disconnect.""" + self.log.debug("Websocket closed %s", self.session_key) + # unregister myself as an open session (only if it's really me) + if self._open_sessions.get(self.session_key) is self.websocket_handler: + self._open_sessions.pop(self.session_key) + + if self.kernel_id in self.multi_kernel_manager: + self.multi_kernel_manager.notify_disconnect(self.kernel_id) + self.multi_kernel_manager.remove_restart_callback( + self.kernel_id, + self.on_kernel_restarted, + ) + self.multi_kernel_manager.remove_restart_callback( + self.kernel_id, + self.on_restart_failed, + "dead", + ) + + # start buffering instead of closing if this was the last connection + if ( + self.kernel_id in self.multi_kernel_manager._kernel_connections + and self.multi_kernel_manager._kernel_connections[self.kernel_id] == 0 + ): + self.multi_kernel_manager.start_buffering( + self.kernel_id, self.session_key, self.channels + ) + ZMQChannelsWebsocketConnection._open_sockets.remove(self) + self._close_future.set_result(None) + return + + # This method can be called twice, once by self.kernel_died and once + # from the WebSocket close event. If the WebSocket connection is + # closed before the ZMQ streams are setup, they could be None. 
+ for _, stream in self.channels.items(): + if stream is not None and not stream.closed(): + stream.on_recv(None) + stream.close() + + self.channels = {} + try: + ZMQChannelsWebsocketConnection._open_sockets.remove(self) + self._close_future.set_result(None) + except Exception: + pass + + def handle_incoming_message(self, incoming_msg: str) -> None: + """Handle incoming messages from Websocket to ZMQ Sockets.""" + ws_msg = incoming_msg + if not self.channels: + # already closed, ignore the message + self.log.debug("Received message on closed websocket %r", ws_msg) + return + + if self.subprotocol == "v1.kernel.websocket.jupyter.org": + channel, msg_list = deserialize_msg_from_ws_v1(ws_msg) + msg = { + "header": None, + } + else: + if isinstance(ws_msg, bytes): # type:ignore[unreachable] + msg = deserialize_binary_message(ws_msg) # type:ignore[unreachable] + else: + msg = json.loads(ws_msg) + msg_list = [] + channel = msg.pop("channel", None) + + if channel is None: + self.log.warning("No channel specified, assuming shell: %s", msg) + channel = "shell" + if channel not in self.channels: + self.log.warning("No such channel: %r", channel) + return + am = self.multi_kernel_manager.allowed_message_types + ignore_msg = False + if am: + msg["header"] = self.get_part("header", msg["header"], msg_list) + assert msg["header"] is not None + if msg["header"]["msg_type"] not in am: # type:ignore[unreachable] + self.log.warning( + 'Received message of type "%s", which is not allowed. Ignoring.' 
+ % msg["header"]["msg_type"] + ) + ignore_msg = True + if not ignore_msg: + stream = self.channels[channel] + if self.subprotocol == "v1.kernel.websocket.jupyter.org": + self.session.send_raw(stream, msg_list) + else: + self.session.send(stream, msg) + + def handle_outgoing_message(self, stream: str, outgoing_msg: list[t.Any]) -> None: + """Handle the outgoing messages from ZMQ sockets to Websocket.""" + msg_list = outgoing_msg + _, fed_msg_list = self.session.feed_identities(msg_list) + + if self.subprotocol == "v1.kernel.websocket.jupyter.org": + msg = {"header": None, "parent_header": None, "content": None} + else: + msg = self.session.deserialize(fed_msg_list) + + if isinstance(stream, str): + stream = self.channels[stream] + + channel = getattr(stream, "channel", None) + parts = fed_msg_list[1:] + + self._on_error(channel, msg, parts) + + if self._limit_rate(channel, msg, parts): + return + + if self.subprotocol == "v1.kernel.websocket.jupyter.org": + self._on_zmq_reply(stream, parts) + else: + self._on_zmq_reply(stream, msg) + + def get_part(self, field, value, msg_list): + """Get a part of a message.""" + if value is None: + field2idx = { + "header": 0, + "parent_header": 1, + "content": 3, + } + value = self.session.unpack(msg_list[field2idx[field]]) + return value + + def _reserialize_reply(self, msg_or_list, channel=None): + """Reserialize a reply message using JSON. + + msg_or_list can be an already-deserialized msg dict or the zmq buffer list. + If it is the zmq list, it will be deserialized with self.session. + + This takes the msg list from the ZMQ socket and serializes the result for the websocket. + This method should be used by self._on_zmq_reply to build messages that can + be sent back to the browser. 
+ + """ + if isinstance(msg_or_list, dict): + # already unpacked + msg = msg_or_list + else: + _, msg_list = self.session.feed_identities(msg_or_list) + msg = self.session.deserialize(msg_list) + if channel: + msg["channel"] = channel + if msg["buffers"]: + buf = serialize_binary_message(msg) + return buf + else: + return json.dumps(msg, default=json_default) + + def _on_zmq_reply(self, stream, msg_list): + """Handle a zmq reply.""" + # Sometimes this gets triggered when the on_close method is scheduled in the + # eventloop but hasn't been called. + if stream.closed(): + self.log.warning("zmq message arrived on closed channel") + self.disconnect() + return + channel = getattr(stream, "channel", None) + if self.subprotocol == "v1.kernel.websocket.jupyter.org": + bin_msg = serialize_msg_to_ws_v1(msg_list, channel) + self.write_message(bin_msg, binary=True) + else: + try: + msg = self._reserialize_reply(msg_list, channel=channel) + except Exception: + self.log.critical("Malformed message: %r" % msg_list, exc_info=True) + else: + try: + self.write_message(msg, binary=isinstance(msg, bytes)) + except WebSocketClosedError as e: + self.log.warning(str(e)) + + def request_kernel_info(self): + """send a request for kernel_info""" + try: + # check for previous request + future = self.kernel_manager._kernel_info_future + except AttributeError: + self.log.debug("Requesting kernel info from %s", self.kernel_id) + # Create a kernel_info channel to query the kernel protocol version. + # This channel will be closed after the kernel_info reply is received. 
+ if self.kernel_info_channel is None: + self.kernel_info_channel = self.multi_kernel_manager.connect_shell(self.kernel_id) + assert self.kernel_info_channel is not None + self.kernel_info_channel.on_recv(self._handle_kernel_info_reply) + self.session.send(self.kernel_info_channel, "kernel_info_request") + # store the future on the kernel, so only one request is sent + self.kernel_manager._kernel_info_future = self._kernel_info_future + else: + if not future.done(): + self.log.debug("Waiting for pending kernel_info request") + future.add_done_callback(lambda f: self._finish_kernel_info(f.result())) + return _ensure_future(self._kernel_info_future) + + def _handle_kernel_info_reply(self, msg): + """process the kernel_info_reply + + enabling msg spec adaptation, if necessary + """ + idents, msg = self.session.feed_identities(msg) + try: + msg = self.session.deserialize(msg) + except BaseException: + self.log.error("Bad kernel_info reply", exc_info=True) + self._kernel_info_future.set_result({}) + return + else: + info = msg["content"] + self.log.debug("Received kernel info: %s", info) + if msg["msg_type"] != "kernel_info_reply" or "protocol_version" not in info: + self.log.error("Kernel info request failed, assuming current %s", info) + info = {} + self._finish_kernel_info(info) + + # close the kernel_info channel, we don't need it anymore + if self.kernel_info_channel: + self.kernel_info_channel.close() + self.kernel_info_channel = None + + def _finish_kernel_info(self, info): + """Finish handling kernel_info reply + + Set up protocol adaptation, if needed, + and signal that connection can continue. + """ + protocol_version = info.get("protocol_version", client_protocol_version) + if protocol_version != client_protocol_version: + self.session.adapt_version = int(protocol_version.split(".")[0]) + self.log.info( + f"Adapting from protocol version {protocol_version} (kernel {self.kernel_id}) to {client_protocol_version} (client)." 
+ ) + if not self._kernel_info_future.done(): + self._kernel_info_future.set_result(info) + + def write_stderr(self, error_message, parent_header): + """Write a message to stderr.""" + self.log.warning(error_message) + err_msg = self.session.msg( + "stream", + content={"text": error_message + "\n", "name": "stderr"}, + parent=parent_header, + ) + if self.subprotocol == "v1.kernel.websocket.jupyter.org": + bin_msg = serialize_msg_to_ws_v1(err_msg, "iopub", self.session.pack) + self.write_message(bin_msg, binary=True) + else: + err_msg["channel"] = "iopub" + self.write_message(json.dumps(err_msg, default=json_default)) + + def _limit_rate(self, channel, msg, msg_list): + """Limit the message rate on a channel.""" + if not (self.limit_rate and channel == "iopub"): + return False + + msg["header"] = self.get_part("header", msg["header"], msg_list) + + msg_type = msg["header"]["msg_type"] + if msg_type == "status": + msg["content"] = self.get_part("content", msg["content"], msg_list) + if msg["content"].get("execution_state") == "idle": + # reset rate limit counter on status=idle, + # to avoid 'Run All' hitting limits prematurely. + self._iopub_window_byte_queue = [] + self._iopub_window_msg_count = 0 + self._iopub_window_byte_count = 0 + self._iopub_msgs_exceeded = False + self._iopub_data_exceeded = False + + if msg_type not in {"status", "comm_open", "execute_input"}: + # Remove the counts queued for removal. + now = IOLoop.current().time() + while len(self._iopub_window_byte_queue) > 0: + queued = self._iopub_window_byte_queue[0] + if now >= queued[0]: + self._iopub_window_byte_count -= queued[1] + self._iopub_window_msg_count -= 1 + del self._iopub_window_byte_queue[0] + else: + # This part of the queue hasn't be reached yet, so we can + # abort the loop. 
+ break + + # Increment the bytes and message count + self._iopub_window_msg_count += 1 + byte_count = sum(len(x) for x in msg_list) if msg_type == "stream" else 0 + self._iopub_window_byte_count += byte_count + + # Queue a removal of the byte and message count for a time in the + # future, when we are no longer interested in it. + self._iopub_window_byte_queue.append((now + self.rate_limit_window, byte_count)) + + # Check the limits, set the limit flags, and reset the + # message and data counts. + msg_rate = float(self._iopub_window_msg_count) / self.rate_limit_window + data_rate = float(self._iopub_window_byte_count) / self.rate_limit_window + + # Check the msg rate + if self.iopub_msg_rate_limit > 0 and msg_rate > self.iopub_msg_rate_limit: + if not self._iopub_msgs_exceeded: + self._iopub_msgs_exceeded = True + msg["parent_header"] = self.get_part( + "parent_header", msg["parent_header"], msg_list + ) + self.write_stderr( + dedent( + f"""\ + IOPub message rate exceeded. + The Jupyter server will temporarily stop sending output + to the client in order to avoid crashing it. + To change this limit, set the config variable + `--ServerApp.iopub_msg_rate_limit`. + + Current values: + ServerApp.iopub_msg_rate_limit={self.iopub_msg_rate_limit} (msgs/sec) + ServerApp.rate_limit_window={self.rate_limit_window} (secs) + """ + ), + msg["parent_header"], + ) + # resume once we've got some headroom below the limit + elif self._iopub_msgs_exceeded and msg_rate < (0.8 * self.iopub_msg_rate_limit): + self._iopub_msgs_exceeded = False + if not self._iopub_data_exceeded: + self.log.warning("iopub messages resumed") + + # Check the data rate + if self.iopub_data_rate_limit > 0 and data_rate > self.iopub_data_rate_limit: + if not self._iopub_data_exceeded: + self._iopub_data_exceeded = True + msg["parent_header"] = self.get_part( + "parent_header", msg["parent_header"], msg_list + ) + self.write_stderr( + dedent( + f"""\ + IOPub data rate exceeded. 
+ The Jupyter server will temporarily stop sending output + to the client in order to avoid crashing it. + To change this limit, set the config variable + `--ServerApp.iopub_data_rate_limit`. + + Current values: + ServerApp.iopub_data_rate_limit={self.iopub_data_rate_limit} (bytes/sec) + ServerApp.rate_limit_window={self.rate_limit_window} (secs) + """ + ), + msg["parent_header"], + ) + # resume once we've got some headroom below the limit + elif self._iopub_data_exceeded and data_rate < (0.8 * self.iopub_data_rate_limit): + self._iopub_data_exceeded = False + if not self._iopub_msgs_exceeded: + self.log.warning("iopub messages resumed") + + # If either of the limit flags are set, do not send the message. + if self._iopub_msgs_exceeded or self._iopub_data_exceeded: + # we didn't send it, remove the current message from the calculus + self._iopub_window_msg_count -= 1 + self._iopub_window_byte_count -= byte_count + self._iopub_window_byte_queue.pop(-1) + return True + + return False + + def _send_status_message(self, status): + """Send a status message.""" + iopub = self.channels.get("iopub", None) + if iopub and not iopub.closed(): + # flush IOPub before sending a restarting/dead status message + # ensures proper ordering on the IOPub channel + # that all messages from the stopped kernel have been delivered + iopub.flush() + msg = self.session.msg("status", {"execution_state": status}) + if self.subprotocol == "v1.kernel.websocket.jupyter.org": + bin_msg = serialize_msg_to_ws_v1(msg, "iopub", self.session.pack) + self.write_message(bin_msg, binary=True) + else: + msg["channel"] = "iopub" + self.write_message(json.dumps(msg, default=json_default)) + + def on_kernel_restarted(self): + """Handle a kernel restart.""" + self.log.warning("kernel %s restarted", self.kernel_id) + self._send_status_message("restarting") + + def on_restart_failed(self): + """Handle a kernel restart failure.""" + self.log.error("kernel %s restarted failed!", self.kernel_id) + 
self._send_status_message("dead") + + def _on_error(self, channel, msg, msg_list): + """Handle an error message.""" + if self.multi_kernel_manager.allow_tracebacks: + return + + if channel == "iopub": + msg["header"] = self.get_part("header", msg["header"], msg_list) + if msg["header"]["msg_type"] == "error": + msg["content"] = self.get_part("content", msg["content"], msg_list) + msg["content"]["ename"] = "ExecutionError" + msg["content"]["evalue"] = "Execution error" + msg["content"]["traceback"] = [self.kernel_manager.traceback_replacement_message] + if self.subprotocol == "v1.kernel.websocket.jupyter.org": + msg_list[3] = self.session.pack(msg["content"]) + + +KernelWebsocketConnectionABC.register(ZMQChannelsWebsocketConnection) diff --git a/jupyter_server/services/kernels/handlers.py b/jupyter_server/services/kernels/handlers.py index c5fd110fa9..217f0c9cc2 100644 --- a/jupyter_server/services/kernels/handlers.py +++ b/jupyter_server/services/kernels/handlers.py @@ -5,42 +5,37 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import json -from textwrap import dedent -from traceback import format_tb - -from jupyter_client import protocol_version as client_protocol_version try: from jupyter_client.jsonutil import json_default except ImportError: from jupyter_client.jsonutil import date_default as json_default -from tornado import gen, web -from tornado.concurrent import Future -from tornado.ioloop import IOLoop +from jupyter_core.utils import ensure_async +from tornado import web -from jupyter_server.auth import authorized -from jupyter_server.utils import ensure_async, url_escape, url_path_join +from jupyter_server.auth.decorator import authorized +from jupyter_server.utils import url_escape, url_path_join from ...base.handlers import APIHandler -from ...base.zmqhandlers import ( - AuthenticatedZMQStreamHandler, - deserialize_binary_message, - deserialize_msg_from_ws_v1, - serialize_msg_to_ws_v1, -) +from .websocket import KernelWebsocketHandler AUTH_RESOURCE = "kernels" class KernelsAPIHandler(APIHandler): + """A kernels API handler.""" + auth_resource = AUTH_RESOURCE class MainKernelHandler(KernelsAPIHandler): + """The root kernel handler.""" + @web.authenticated @authorized async def get(self): + """Get the list of running kernels.""" km = self.kernel_manager kernels = await ensure_async(km.list_kernels()) self.finish(json.dumps(kernels, default=json_default)) @@ -48,6 +43,7 @@ async def get(self): @web.authenticated @authorized async def post(self): + """Start a kernel.""" km = self.kernel_manager model = self.get_json_body() if model is None: @@ -55,7 +51,11 @@ async def post(self): else: model.setdefault("name", km.default_kernel_name) - kernel_id = await km.start_kernel(kernel_name=model["name"], path=model.get("path")) + kernel_id = await ensure_async( + km.start_kernel( # type:ignore[has-type] + kernel_name=model["name"], path=model.get("path") + ) + ) model = await ensure_async(km.kernel_model(kernel_id)) location = url_path_join(self.base_url, "api", "kernels", 
url_escape(kernel_id)) self.set_header("Location", location) @@ -64,9 +64,12 @@ async def post(self): class KernelHandler(KernelsAPIHandler): + """A kernel API handler.""" + @web.authenticated @authorized async def get(self, kernel_id): + """Get a kernel model.""" km = self.kernel_manager model = await ensure_async(km.kernel_model(kernel_id)) self.finish(json.dumps(model, default=json_default)) @@ -74,6 +77,7 @@ async def get(self, kernel_id): @web.authenticated @authorized async def delete(self, kernel_id): + """Remove a kernel.""" km = self.kernel_manager await ensure_async(km.shutdown_kernel(kernel_id)) self.set_status(204) @@ -81,22 +85,23 @@ async def delete(self, kernel_id): class KernelActionHandler(KernelsAPIHandler): + """A kernel action API handler.""" + @web.authenticated @authorized async def post(self, kernel_id, action): + """Interrupt or restart a kernel.""" km = self.kernel_manager if action == "interrupt": - await ensure_async(km.interrupt_kernel(kernel_id)) + await ensure_async(km.interrupt_kernel(kernel_id)) # type:ignore[func-returns-value] self.set_status(204) if action == "restart": - try: await km.restart_kernel(kernel_id) except Exception as e: message = "Exception restarting kernel" self.log.error(message, exc_info=True) - traceback = format_tb(e.__traceback__) - self.write(json.dumps(dict(message=message, traceback=traceback))) + self.write(json.dumps({"message": message, "traceback": ""})) self.set_status(500) else: model = await ensure_async(km.kernel_model(kernel_id)) @@ -104,678 +109,9 @@ async def post(self, kernel_id, action): self.finish() -class ZMQChannelsHandler(AuthenticatedZMQStreamHandler): - """There is one ZMQChannelsHandler per running kernel and it oversees all - the sessions. - """ - - auth_resource = AUTH_RESOURCE - - # class-level registry of open sessions - # allows checking for conflict on session-id, - # which is used as a zmq identity and must be unique. 
- _open_sessions = {} - - @property - def kernel_info_timeout(self): - km_default = self.kernel_manager.kernel_info_timeout - return self.settings.get("kernel_info_timeout", km_default) - - @property - def limit_rate(self): - return self.settings.get("limit_rate", True) - - @property - def iopub_msg_rate_limit(self): - return self.settings.get("iopub_msg_rate_limit", 0) - - @property - def iopub_data_rate_limit(self): - return self.settings.get("iopub_data_rate_limit", 0) - - @property - def rate_limit_window(self): - return self.settings.get("rate_limit_window", 1.0) - - @property - def subprotocol(self): - try: - protocol = self.selected_subprotocol - except Exception: - protocol = None - return protocol - - def __repr__(self): - return "{}({})".format( - self.__class__.__name__, - getattr(self, "kernel_id", "uninitialized"), - ) - - def create_stream(self): - km = self.kernel_manager - identity = self.session.bsession - for channel in ("iopub", "shell", "control", "stdin"): - meth = getattr(km, "connect_" + channel) - self.channels[channel] = stream = meth(self.kernel_id, identity=identity) - stream.channel = channel - - def nudge(self): - """Nudge the zmq connections with kernel_info_requests - Returns a Future that will resolve when we have received - a shell or control reply and at least one iopub message, - ensuring that zmq subscriptions are established, - sockets are fully connected, and kernel is responsive. - Keeps retrying kernel_info_request until these are both received. - """ - kernel = self.kernel_manager.get_kernel(self.kernel_id) - - # Do not nudge busy kernels as kernel info requests sent to shell are - # queued behind execution requests. - # nudging in this case would cause a potentially very long wait - # before connections are opened, - # plus it is *very* unlikely that a busy kernel will not finish - # establishing its zmq subscriptions before processing the next request. 
- if getattr(kernel, "execution_state", None) == "busy": - self.log.debug("Nudge: not nudging busy kernel %s", self.kernel_id) - f = Future() - f.set_result(None) - return f - # Use a transient shell channel to prevent leaking - # shell responses to the front-end. - shell_channel = kernel.connect_shell() - # Use a transient control channel to prevent leaking - # control responses to the front-end. - control_channel = kernel.connect_control() - # The IOPub used by the client, whose subscriptions we are verifying. - iopub_channel = self.channels["iopub"] - - info_future = Future() - iopub_future = Future() - both_done = gen.multi([info_future, iopub_future]) - - def finish(_=None): - """Ensure all futures are resolved - which in turn triggers cleanup - """ - for f in (info_future, iopub_future): - if not f.done(): - f.set_result(None) - - def cleanup(_=None): - """Common cleanup""" - loop.remove_timeout(nudge_handle) - iopub_channel.stop_on_recv() - if not shell_channel.closed(): - shell_channel.close() - if not control_channel.closed(): - control_channel.close() - - # trigger cleanup when both message futures are resolved - both_done.add_done_callback(cleanup) - - def on_shell_reply(msg): - self.log.debug("Nudge: shell info reply received: %s", self.kernel_id) - if not info_future.done(): - self.log.debug("Nudge: resolving shell future: %s", self.kernel_id) - info_future.set_result(None) - - def on_control_reply(msg): - self.log.debug("Nudge: control info reply received: %s", self.kernel_id) - if not info_future.done(): - self.log.debug("Nudge: resolving control future: %s", self.kernel_id) - info_future.set_result(None) - - def on_iopub(msg): - self.log.debug("Nudge: IOPub received: %s", self.kernel_id) - if not iopub_future.done(): - iopub_channel.stop_on_recv() - self.log.debug("Nudge: resolving iopub future: %s", self.kernel_id) - iopub_future.set_result(None) - - iopub_channel.on_recv(on_iopub) - shell_channel.on_recv(on_shell_reply) - 
control_channel.on_recv(on_control_reply) - loop = IOLoop.current() - - # Nudge the kernel with kernel info requests until we get an IOPub message - def nudge(count): - count += 1 - - # NOTE: this close check appears to never be True during on_open, - # even when the peer has closed the connection - if self.ws_connection is None or self.ws_connection.is_closing(): - self.log.debug("Nudge: cancelling on closed websocket: %s", self.kernel_id) - finish() - return - - # check for stopped kernel - if self.kernel_id not in self.kernel_manager: - self.log.debug("Nudge: cancelling on stopped kernel: %s", self.kernel_id) - finish() - return - - # check for closed zmq socket - if shell_channel.closed(): - self.log.debug("Nudge: cancelling on closed zmq socket: %s", self.kernel_id) - finish() - return - - # check for closed zmq socket - if control_channel.closed(): - self.log.debug("Nudge: cancelling on closed zmq socket: %s", self.kernel_id) - finish() - return - - if not both_done.done(): - log = self.log.warning if count % 10 == 0 else self.log.debug - log(f"Nudge: attempt {count} on kernel {self.kernel_id}") - self.session.send(shell_channel, "kernel_info_request") - self.session.send(control_channel, "kernel_info_request") - nonlocal nudge_handle - nudge_handle = loop.call_later(0.5, nudge, count) - - nudge_handle = loop.call_later(0, nudge, count=0) - - # resolve with a timeout if we get no response - future = gen.with_timeout(loop.time() + self.kernel_info_timeout, both_done) - # ensure we have no dangling resources or unresolved Futures in case of timeout - future.add_done_callback(finish) - return future - - def request_kernel_info(self): - """send a request for kernel_info""" - km = self.kernel_manager - kernel = km.get_kernel(self.kernel_id) - try: - # check for previous request - future = kernel._kernel_info_future - except AttributeError: - self.log.debug("Requesting kernel info from %s", self.kernel_id) - # Create a kernel_info channel to query the kernel 
protocol version. - # This channel will be closed after the kernel_info reply is received. - if self.kernel_info_channel is None: - self.kernel_info_channel = km.connect_shell(self.kernel_id) - self.kernel_info_channel.on_recv(self._handle_kernel_info_reply) - self.session.send(self.kernel_info_channel, "kernel_info_request") - # store the future on the kernel, so only one request is sent - kernel._kernel_info_future = self._kernel_info_future - else: - if not future.done(): - self.log.debug("Waiting for pending kernel_info request") - future.add_done_callback(lambda f: self._finish_kernel_info(f.result())) - return self._kernel_info_future - - def _handle_kernel_info_reply(self, msg): - """process the kernel_info_reply - - enabling msg spec adaptation, if necessary - """ - idents, msg = self.session.feed_identities(msg) - try: - msg = self.session.deserialize(msg) - except BaseException: - self.log.error("Bad kernel_info reply", exc_info=True) - self._kernel_info_future.set_result({}) - return - else: - info = msg["content"] - self.log.debug("Received kernel info: %s", info) - if msg["msg_type"] != "kernel_info_reply" or "protocol_version" not in info: - self.log.error("Kernel info request failed, assuming current %s", info) - info = {} - self._finish_kernel_info(info) - - # close the kernel_info channel, we don't need it anymore - if self.kernel_info_channel: - self.kernel_info_channel.close() - self.kernel_info_channel = None - - def _finish_kernel_info(self, info): - """Finish handling kernel_info reply - - Set up protocol adaptation, if needed, - and signal that connection can continue. 
- """ - protocol_version = info.get("protocol_version", client_protocol_version) - if protocol_version != client_protocol_version: - self.session.adapt_version = int(protocol_version.split(".")[0]) - self.log.info( - "Adapting from protocol version {protocol_version} (kernel {kernel_id}) to {client_protocol_version} (client).".format( - protocol_version=protocol_version, - kernel_id=self.kernel_id, - client_protocol_version=client_protocol_version, - ) - ) - if not self._kernel_info_future.done(): - self._kernel_info_future.set_result(info) - - def initialize(self): - super().initialize() - self.zmq_stream = None - self.channels = {} - self.kernel_id = None - self.kernel_info_channel = None - self._kernel_info_future = Future() - self._close_future = Future() - self.session_key = "" - - # Rate limiting code - self._iopub_window_msg_count = 0 - self._iopub_window_byte_count = 0 - self._iopub_msgs_exceeded = False - self._iopub_data_exceeded = False - # Queue of (time stamp, byte count) - # Allows you to specify that the byte count should be lowered - # by a delta amount at some point in the future. - self._iopub_window_byte_queue = [] - - async def pre_get(self): - # authenticate first - super().pre_get() - # check session collision: - await self._register_session() - # then request kernel info, waiting up to a certain time before giving up. - # We don't want to wait forever, because browsers don't take it well when - # servers never respond to websocket connection requests. 
- kernel = self.kernel_manager.get_kernel(self.kernel_id) - - if hasattr(kernel, "ready"): - try: - await kernel.ready - except Exception as e: - kernel.execution_state = "dead" - kernel.reason = str(e) - raise web.HTTPError(500, str(e)) from e - - self.session.key = kernel.session.key - future = self.request_kernel_info() - - def give_up(): - """Don't wait forever for the kernel to reply""" - if future.done(): - return - self.log.warning("Timeout waiting for kernel_info reply from %s", self.kernel_id) - future.set_result({}) - - loop = IOLoop.current() - loop.add_timeout(loop.time() + self.kernel_info_timeout, give_up) - # actually wait for it - await future - - async def get(self, kernel_id): - self.kernel_id = kernel_id - await super().get(kernel_id=kernel_id) - - async def _register_session(self): - """Ensure we aren't creating a duplicate session. - - If a previous identical session is still open, close it to avoid collisions. - This is likely due to a client reconnecting from a lost network connection, - where the socket on our side has not been cleaned up yet. 
- """ - self.session_key = f"{self.kernel_id}:{self.session.session}" - stale_handler = self._open_sessions.get(self.session_key) - if stale_handler: - self.log.warning("Replacing stale connection: %s", self.session_key) - await stale_handler.close() - if ( - self.kernel_id in self.kernel_manager - ): # only update open sessions if kernel is actively managed - self._open_sessions[self.session_key] = self - - def open(self, kernel_id): - super().open() - km = self.kernel_manager - km.notify_connect(kernel_id) - - # on new connections, flush the message buffer - buffer_info = km.get_buffer(kernel_id, self.session_key) - if buffer_info and buffer_info["session_key"] == self.session_key: - self.log.info("Restoring connection for %s", self.session_key) - if km.ports_changed(kernel_id): - # If the kernel's ports have changed (some restarts trigger this) - # then reset the channels so nudge() is using the correct iopub channel - self.create_stream() - else: - # The kernel's ports have not changed; use the channels captured in the buffer - self.channels = buffer_info["channels"] - - connected = self.nudge() - - def replay(value): - replay_buffer = buffer_info["buffer"] - if replay_buffer: - self.log.info("Replaying %s buffered messages", len(replay_buffer)) - for channel, msg_list in replay_buffer: - stream = self.channels[channel] - self._on_zmq_reply(stream, msg_list) - - connected.add_done_callback(replay) - else: - try: - self.create_stream() - connected = self.nudge() - except web.HTTPError as e: - # Do not log error if the kernel is already shutdown, - # as it's normal that it's not responding - try: - self.kernel_manager.get_kernel(kernel_id) - - self.log.error("Error opening stream: %s", e) - except KeyError: - pass - # WebSockets don't respond to traditional error codes so we - # close the connection. 
- for _, stream in self.channels.items(): - if not stream.closed(): - stream.close() - self.close() - return - - km.add_restart_callback(self.kernel_id, self.on_kernel_restarted) - km.add_restart_callback(self.kernel_id, self.on_restart_failed, "dead") - - def subscribe(value): - for _, stream in self.channels.items(): - stream.on_recv_stream(self._on_zmq_reply) - - connected.add_done_callback(subscribe) - - return connected - - def on_message(self, ws_msg): - if not self.channels: - # already closed, ignore the message - self.log.debug("Received message on closed websocket %r", ws_msg) - return - - if self.subprotocol == "v1.kernel.websocket.jupyter.org": - channel, msg_list = deserialize_msg_from_ws_v1(ws_msg) - msg = { - "header": None, - } - else: - if isinstance(ws_msg, bytes): - msg = deserialize_binary_message(ws_msg) - else: - msg = json.loads(ws_msg) - msg_list = [] - channel = msg.pop("channel", None) - - if channel is None: - self.log.warning("No channel specified, assuming shell: %s", msg) - channel = "shell" - if channel not in self.channels: - self.log.warning("No such channel: %r", channel) - return - am = self.kernel_manager.allowed_message_types - ignore_msg = False - if am: - msg["header"] = self.get_part("header", msg["header"], msg_list) - if msg["header"]["msg_type"] not in am: - self.log.warning( - 'Received message of type "%s", which is not allowed. Ignoring.' 
- % msg["header"]["msg_type"] - ) - ignore_msg = True - if not ignore_msg: - stream = self.channels[channel] - if self.subprotocol == "v1.kernel.websocket.jupyter.org": - self.session.send_raw(stream, msg_list) - else: - self.session.send(stream, msg) - - def get_part(self, field, value, msg_list): - if value is None: - field2idx = { - "header": 0, - "parent_header": 1, - "content": 3, - } - value = self.session.unpack(msg_list[field2idx[field]]) - return value - - def _on_zmq_reply(self, stream, msg_list): - idents, fed_msg_list = self.session.feed_identities(msg_list) - - if self.subprotocol == "v1.kernel.websocket.jupyter.org": - msg = {"header": None, "parent_header": None, "content": None} - else: - msg = self.session.deserialize(fed_msg_list) - - channel = getattr(stream, "channel", None) - parts = fed_msg_list[1:] - - self._on_error(channel, msg, parts) - - if self._limit_rate(channel, msg, parts): - return - - if self.subprotocol == "v1.kernel.websocket.jupyter.org": - super()._on_zmq_reply(stream, parts) - else: - super()._on_zmq_reply(stream, msg) - - def write_stderr(self, error_message, parent_header): - self.log.warning(error_message) - err_msg = self.session.msg( - "stream", - content={"text": error_message + "\n", "name": "stderr"}, - parent=parent_header, - ) - if self.subprotocol == "v1.kernel.websocket.jupyter.org": - bin_msg = serialize_msg_to_ws_v1(err_msg, "iopub", self.session.pack) - self.write_message(bin_msg, binary=True) - else: - err_msg["channel"] = "iopub" - self.write_message(json.dumps(err_msg, default=json_default)) - - def _limit_rate(self, channel, msg, msg_list): - if not (self.limit_rate and channel == "iopub"): - return False - - msg["header"] = self.get_part("header", msg["header"], msg_list) - - msg_type = msg["header"]["msg_type"] - if msg_type == "status": - msg["content"] = self.get_part("content", msg["content"], msg_list) - if msg["content"].get("execution_state") == "idle": - # reset rate limit counter on status=idle, - 
# to avoid 'Run All' hitting limits prematurely. - self._iopub_window_byte_queue = [] - self._iopub_window_msg_count = 0 - self._iopub_window_byte_count = 0 - self._iopub_msgs_exceeded = False - self._iopub_data_exceeded = False - - if msg_type not in {"status", "comm_open", "execute_input"}: - # Remove the counts queued for removal. - now = IOLoop.current().time() - while len(self._iopub_window_byte_queue) > 0: - queued = self._iopub_window_byte_queue[0] - if now >= queued[0]: - self._iopub_window_byte_count -= queued[1] - self._iopub_window_msg_count -= 1 - del self._iopub_window_byte_queue[0] - else: - # This part of the queue hasn't be reached yet, so we can - # abort the loop. - break - - # Increment the bytes and message count - self._iopub_window_msg_count += 1 - if msg_type == "stream": - byte_count = sum(len(x) for x in msg_list) - else: - byte_count = 0 - self._iopub_window_byte_count += byte_count - - # Queue a removal of the byte and message count for a time in the - # future, when we are no longer interested in it. - self._iopub_window_byte_queue.append((now + self.rate_limit_window, byte_count)) - - # Check the limits, set the limit flags, and reset the - # message and data counts. - msg_rate = float(self._iopub_window_msg_count) / self.rate_limit_window - data_rate = float(self._iopub_window_byte_count) / self.rate_limit_window - - # Check the msg rate - if self.iopub_msg_rate_limit > 0 and msg_rate > self.iopub_msg_rate_limit: - if not self._iopub_msgs_exceeded: - self._iopub_msgs_exceeded = True - msg["parent_header"] = self.get_part( - "parent_header", msg["parent_header"], msg_list - ) - self.write_stderr( - dedent( - """\ - IOPub message rate exceeded. - The Jupyter server will temporarily stop sending output - to the client in order to avoid crashing it. - To change this limit, set the config variable - `--ServerApp.iopub_msg_rate_limit`. 
- - Current values: - ServerApp.iopub_msg_rate_limit={} (msgs/sec) - ServerApp.rate_limit_window={} (secs) - """.format( - self.iopub_msg_rate_limit, self.rate_limit_window - ) - ), - msg["parent_header"], - ) - else: - # resume once we've got some headroom below the limit - if self._iopub_msgs_exceeded and msg_rate < (0.8 * self.iopub_msg_rate_limit): - self._iopub_msgs_exceeded = False - if not self._iopub_data_exceeded: - self.log.warning("iopub messages resumed") - - # Check the data rate - if self.iopub_data_rate_limit > 0 and data_rate > self.iopub_data_rate_limit: - if not self._iopub_data_exceeded: - self._iopub_data_exceeded = True - msg["parent_header"] = self.get_part( - "parent_header", msg["parent_header"], msg_list - ) - self.write_stderr( - dedent( - """\ - IOPub data rate exceeded. - The Jupyter server will temporarily stop sending output - to the client in order to avoid crashing it. - To change this limit, set the config variable - `--ServerApp.iopub_data_rate_limit`. - - Current values: - ServerApp.iopub_data_rate_limit={} (bytes/sec) - ServerApp.rate_limit_window={} (secs) - """.format( - self.iopub_data_rate_limit, self.rate_limit_window - ) - ), - msg["parent_header"], - ) - else: - # resume once we've got some headroom below the limit - if self._iopub_data_exceeded and data_rate < (0.8 * self.iopub_data_rate_limit): - self._iopub_data_exceeded = False - if not self._iopub_msgs_exceeded: - self.log.warning("iopub messages resumed") - - # If either of the limit flags are set, do not send the message. 
- if self._iopub_msgs_exceeded or self._iopub_data_exceeded: - # we didn't send it, remove the current message from the calculus - self._iopub_window_msg_count -= 1 - self._iopub_window_byte_count -= byte_count - self._iopub_window_byte_queue.pop(-1) - return True - - return False - - def close(self): - super().close() - return self._close_future - - def on_close(self): - self.log.debug("Websocket closed %s", self.session_key) - # unregister myself as an open session (only if it's really me) - if self._open_sessions.get(self.session_key) is self: - self._open_sessions.pop(self.session_key) - - km = self.kernel_manager - if self.kernel_id in km: - km.notify_disconnect(self.kernel_id) - km.remove_restart_callback( - self.kernel_id, - self.on_kernel_restarted, - ) - km.remove_restart_callback( - self.kernel_id, - self.on_restart_failed, - "dead", - ) - - # start buffering instead of closing if this was the last connection - if km._kernel_connections[self.kernel_id] == 0: - km.start_buffering(self.kernel_id, self.session_key, self.channels) - self._close_future.set_result(None) - return - - # This method can be called twice, once by self.kernel_died and once - # from the WebSocket close event. If the WebSocket connection is - # closed before the ZMQ streams are setup, they could be None. 
- for _, stream in self.channels.items(): - if stream is not None and not stream.closed(): - stream.on_recv(None) - stream.close() - - self.channels = {} - self._close_future.set_result(None) - - def _send_status_message(self, status): - iopub = self.channels.get("iopub", None) - if iopub and not iopub.closed(): - # flush IOPub before sending a restarting/dead status message - # ensures proper ordering on the IOPub channel - # that all messages from the stopped kernel have been delivered - iopub.flush() - msg = self.session.msg("status", {"execution_state": status}) - if self.subprotocol == "v1.kernel.websocket.jupyter.org": - bin_msg = serialize_msg_to_ws_v1(msg, "iopub", self.session.pack) - self.write_message(bin_msg, binary=True) - else: - msg["channel"] = "iopub" - self.write_message(json.dumps(msg, default=json_default)) - - def on_kernel_restarted(self): - self.log.warning("kernel %s restarted", self.kernel_id) - self._send_status_message("restarting") - - def on_restart_failed(self): - self.log.error("kernel %s restarted failed!", self.kernel_id) - self._send_status_message("dead") - - def _on_error(self, channel, msg, msg_list): - if self.kernel_manager.allow_tracebacks: - return - - if channel == "iopub": - msg["header"] = self.get_part("header", msg["header"], msg_list) - if msg["header"]["msg_type"] == "error": - msg["content"] = self.get_part("content", msg["content"], msg_list) - msg["content"]["ename"] = "ExecutionError" - msg["content"]["evalue"] = "Execution error" - msg["content"]["traceback"] = [self.kernel_manager.traceback_replacement_message] - if self.subprotocol == "v1.kernel.websocket.jupyter.org": - msg_list[3] = self.session.pack(msg["content"]) - - # ----------------------------------------------------------------------------- # URL to handler mappings # ----------------------------------------------------------------------------- - - _kernel_id_regex = r"(?P\w+-\w+-\w+-\w+-\w+)" _kernel_action_regex = r"(?Prestart|interrupt)" @@ -786,5 
+122,5 @@ def _on_error(self, channel, msg, msg_list): rf"/api/kernels/{_kernel_id_regex}/{_kernel_action_regex}", KernelActionHandler, ), - (r"/api/kernels/%s/channels" % _kernel_id_regex, ZMQChannelsHandler), + (r"/api/kernels/%s/channels" % _kernel_id_regex, KernelWebsocketHandler), ] diff --git a/jupyter_server/services/kernels/kernelmanager.py b/jupyter_server/services/kernels/kernelmanager.py index f9b9af23bd..451a279a4e 100644 --- a/jupyter_server/services/kernels/kernelmanager.py +++ b/jupyter_server/services/kernels/kernelmanager.py @@ -5,18 +5,25 @@ """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from __future__ import annotations + import asyncio import os +import pathlib +import typing as t +import warnings from collections import defaultdict from datetime import datetime, timedelta -from functools import partial +from functools import partial, wraps -from jupyter_client.multikernelmanager import ( - AsyncMultiKernelManager, - MultiKernelManager, -) +from jupyter_client.ioloop.manager import AsyncIOLoopKernelManager +from jupyter_client.multikernelmanager import AsyncMultiKernelManager, MultiKernelManager from jupyter_client.session import Session from jupyter_core.paths import exists +from jupyter_core.utils import ensure_async +from jupyter_events import EventLogger +from jupyter_events.schema_registry import SchemaRegistryException +from overrides import overrides from tornado import web from tornado.concurrent import Future from tornado.ioloop import IOLoop, PeriodicCallback @@ -34,9 +41,10 @@ validate, ) +from jupyter_server import DEFAULT_EVENTS_SCHEMA_PATH from jupyter_server._tz import isoformat, utcnow from jupyter_server.prometheus.metrics import KERNEL_CURRENTLY_RUNNING_TOTAL -from jupyter_server.utils import ensure_async, to_os_path +from jupyter_server.utils import ApiPath, import_item, to_os_path class MappingKernelManager(MultiKernelManager): @@ -56,7 +64,7 @@ def 
_default_kernel_manager_class(self): _kernel_connections = Dict() - _kernel_ports = Dict() + _kernel_ports: dict[str, list[int]] = Dict() # type: ignore[assignment] _culler_callback = None @@ -64,10 +72,9 @@ def _default_kernel_manager_class(self): @default("root_dir") def _default_root_dir(self): - try: - return self.parent.root_dir - except AttributeError: + if not self.parent: return os.getcwd() + return self.parent.root_dir @validate("root_dir") def _update_root_dir(self, proposal): @@ -148,7 +155,9 @@ def _default_kernel_buffers(self): ) def __init__(self, **kwargs): + """Initialize a kernel manager.""" self.pinned_superclass = MultiKernelManager + self._pending_kernel_tasks = {} self.pinned_superclass.__init__(self, **kwargs) self.last_kernel_activity = utcnow() @@ -179,7 +188,7 @@ def _handle_kernel_died(self, kernel_id): self.log.warning("Kernel %s died, removing from map.", kernel_id) self.remove_kernel(kernel_id) - def cwd_for_path(self, path): + def cwd_for_path(self, path, **kwargs): """Turn API path into absolute OS path.""" os_path = to_os_path(path, self.root_dir) # in the case of documents and kernels not being on the same filesystem, @@ -189,16 +198,21 @@ def cwd_for_path(self, path): return os_path async def _remove_kernel_when_ready(self, kernel_id, kernel_awaitable): + """Remove a kernel when it is ready.""" await super()._remove_kernel_when_ready(kernel_id, kernel_awaitable) self._kernel_connections.pop(kernel_id, None) self._kernel_ports.pop(kernel_id, None) - async def start_kernel(self, kernel_id=None, path=None, **kwargs): + # TODO DEC 2022: Revise the type-ignore once the signatures have been changed upstream + # https://github.com/jupyter/jupyter_client/pull/905 + async def _async_start_kernel( # type:ignore[override] + self, *, kernel_id: str | None = None, path: ApiPath | None = None, **kwargs: str + ) -> str: """Start a kernel for a session and return its kernel_id. 
Parameters ---------- - kernel_id : uuid + kernel_id : uuid (str) The uuid to associate the new kernel with. If this is not None, this kernel will be persistent whenever it is requested. @@ -211,42 +225,54 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): """ if kernel_id is None or kernel_id not in self: if path is not None: - kwargs["cwd"] = self.cwd_for_path(path) + kwargs["cwd"] = self.cwd_for_path(path, env=kwargs.get("env", {})) if kernel_id is not None: + assert kernel_id is not None, "Never Fail, but necessary for mypy " kwargs["kernel_id"] = kernel_id - kernel_id = await ensure_async(self.pinned_superclass.start_kernel(self, **kwargs)) + kernel_id = await self.pinned_superclass._async_start_kernel(self, **kwargs) self._kernel_connections[kernel_id] = 0 - fut = asyncio.ensure_future(self._finish_kernel_start(kernel_id)) + task = asyncio.create_task(self._finish_kernel_start(kernel_id)) if not getattr(self, "use_pending_kernels", None): - await fut + await task + else: + self._pending_kernel_tasks[kernel_id] = task # add busy/activity markers: kernel = self.get_kernel(kernel_id) - kernel.execution_state = "starting" - kernel.reason = "" - kernel.last_activity = utcnow() - self.log.info("Kernel started: %s" % kernel_id) - self.log.debug("Kernel args: %r" % kwargs) + kernel.execution_state = "starting" # type:ignore[attr-defined] + kernel.reason = "" # type:ignore[attr-defined] + kernel.last_activity = utcnow() # type:ignore[attr-defined] + self.log.info("Kernel started: %s", kernel_id) + self.log.debug("Kernel args: %r", kwargs) # Increase the metric of number of kernels running # for the relevant kernel type by 1 KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).inc() else: - self.log.info("Using existing kernel: %s" % kernel_id) + self.log.info("Using existing kernel: %s", kernel_id) # Initialize culling if not already if not self._initialized_culler: self.initialize_culler() - + assert kernel_id is not 
None return kernel_id + # see https://github.com/jupyter-server/jupyter_server/issues/1165 + # this assignment is technically incorrect, but might need a change of API + # in jupyter_client. + start_kernel = _async_start_kernel # type:ignore[assignment] + async def _finish_kernel_start(self, kernel_id): + """Handle a kernel that finishes starting.""" km = self.get_kernel(kernel_id) if hasattr(km, "ready"): + ready = km.ready + if not isinstance(ready, asyncio.Future): + ready = asyncio.wrap_future(ready) try: - await km.ready + await ready except Exception: - self.log.exception(km.ready.exception()) + self.log.exception("Error waiting for kernel manager ready") return self._kernel_ports[kernel_id] = km.ports @@ -273,7 +299,7 @@ def ports_changed(self, kernel_id): changed_ports = self._get_changed_ports(kernel_id) if changed_ports: # If changed, update captured ports and return True, else return False. - self.log.debug(f"Port change detected for kernel: {kernel_id}") + self.log.debug("Port change detected for kernel: %s", kernel_id) self._kernel_ports[kernel_id] = changed_ports return True return False @@ -288,6 +314,8 @@ def _get_changed_ports(self, kernel_id): """ # Get current ports and return comparison with ports captured at startup. 
km = self.get_kernel(kernel_id) + assert isinstance(km.ports, list) + assert isinstance(self._kernel_ports[kernel_id], list) if km.ports != self._kernel_ports[kernel_id]: return km.ports return None @@ -345,7 +373,7 @@ def get_buffer(self, kernel_id, session_key): """ self.log.debug("Getting buffer for %s", kernel_id) if kernel_id not in self._kernel_buffers: - return + return None buffer_info = self._kernel_buffers[kernel_id] if buffer_info["session_key"] == session_key: @@ -372,7 +400,7 @@ def stop_buffering(self, kernel_id): buffer_info = self._kernel_buffers.pop(kernel_id) # close buffering streams for stream in buffer_info["channels"].values(): - if not stream.closed(): + if not stream.socket.closed: stream.on_recv(None) stream.close() @@ -384,33 +412,43 @@ def stop_buffering(self, kernel_id): buffer_info["session_key"], ) - def shutdown_kernel(self, kernel_id, now=False, restart=False): + async def _async_shutdown_kernel(self, kernel_id, now=False, restart=False): """Shutdown a kernel by kernel_id""" self._check_kernel_id(kernel_id) - self.stop_watching_activity(kernel_id) - self.stop_buffering(kernel_id) # Decrease the metric of number of kernels # running for the relevant kernel type by 1 KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec() - self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart) + if kernel_id in self._pending_kernel_tasks: + task = self._pending_kernel_tasks.pop(kernel_id) + task.cancel() + + self.stop_watching_activity(kernel_id) + self.stop_buffering(kernel_id) + + return await self.pinned_superclass._async_shutdown_kernel( + self, kernel_id, now=now, restart=restart + ) - async def restart_kernel(self, kernel_id, now=False): + shutdown_kernel = _async_shutdown_kernel + + async def _async_restart_kernel(self, kernel_id, now=False): """Restart a kernel by kernel_id""" self._check_kernel_id(kernel_id) - await ensure_async(self.pinned_superclass.restart_kernel(self, kernel_id, 
now=now)) + await self.pinned_superclass._async_restart_kernel(self, kernel_id, now=now) kernel = self.get_kernel(kernel_id) # return a Future that will resolve when the kernel has successfully restarted channel = kernel.connect_shell() - future = Future() + future: Future[Any] = Future() def finish(): """Common cleanup when restart finishes/fails for any reason.""" - if not channel.closed(): + if not channel.closed(): # type:ignore[operator] channel.close() loop.remove_timeout(timeout) kernel.remove_restart_callback(on_restart_failed, "dead") + kernel._pending_restart_cleanup = None # type:ignore[attr-defined] def on_reply(msg): self.log.debug("Kernel info reply received: %s", kernel_id) @@ -431,8 +469,9 @@ def on_restart_failed(): future.set_exception(RuntimeError("Restart failed")) kernel.add_restart_callback(on_restart_failed, "dead") + kernel._pending_restart_cleanup = finish # type:ignore[attr-defined] kernel.session.send(channel, "kernel_info_request") - channel.on_recv(on_reply) + channel.on_recv(on_reply) # type:ignore[operator] loop = IOLoop.current() timeout = loop.add_timeout(loop.time() + self.kernel_info_timeout, on_timeout) # Re-establish activity watching if ports have changed... 
@@ -441,6 +480,8 @@ def on_restart_failed(): self.start_watching_activity(kernel_id) return future + restart_kernel = _async_restart_kernel + def notify_connect(self, kernel_id): """Notice a new connection to a kernel""" if kernel_id in self._kernel_connections: @@ -479,7 +520,8 @@ def list_kernels(self): model = self.kernel_model(kernel_id) kernels.append(model) except (web.HTTPError, KeyError): - pass # Probably due to a (now) non-existent kernel, continue building the list + # Probably due to a (now) non-existent kernel, continue building the list + pass return kernels # override _check_kernel_id to raise 404 instead of KeyError @@ -533,41 +575,46 @@ def stop_watching_activity(self, kernel_id): """Stop watching IOPub messages on a kernel for activity.""" kernel = self._kernels[kernel_id] if getattr(kernel, "_activity_stream", None): - kernel._activity_stream.close() + if not kernel._activity_stream.socket.closed: + kernel._activity_stream.close() kernel._activity_stream = None + if getattr(kernel, "_pending_restart_cleanup", None): + kernel._pending_restart_cleanup() def initialize_culler(self): """Start idle culler if 'cull_idle_timeout' is greater than zero. Regardless of that value, set flag that we've been here. 
""" - if not self._initialized_culler and self.cull_idle_timeout > 0: - if self._culler_callback is None: - _ = IOLoop.current() - if self.cull_interval <= 0: # handle case where user set invalid value - self.log.warning( - "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", - self.cull_interval, - self.cull_interval_default, - ) - self.cull_interval = self.cull_interval_default - self._culler_callback = PeriodicCallback( - self.cull_kernels, 1000 * self.cull_interval - ) - self.log.info( - "Culling kernels with idle durations > %s seconds at %s second intervals ...", - self.cull_idle_timeout, + if ( + not self._initialized_culler + and self.cull_idle_timeout > 0 + and self._culler_callback is None + ): + _ = IOLoop.current() + if self.cull_interval <= 0: # handle case where user set invalid value + self.log.warning( + "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", self.cull_interval, + self.cull_interval_default, ) - if self.cull_busy: - self.log.info("Culling kernels even if busy") - if self.cull_connected: - self.log.info("Culling kernels even with connected clients") - self._culler_callback.start() + self.cull_interval = self.cull_interval_default + self._culler_callback = PeriodicCallback(self.cull_kernels, 1000 * self.cull_interval) + self.log.info( + "Culling kernels with idle durations > %s seconds at %s second intervals ...", + self.cull_idle_timeout, + self.cull_interval, + ) + if self.cull_busy: + self.log.info("Culling kernels even if busy") + if self.cull_connected: + self.log.info("Culling kernels even with connected clients") + self._culler_callback.start() self._initialized_culler = True async def cull_kernels(self): + """Handle culling kernels.""" self.log.debug( "Polling every %s seconds for kernels idle > %s seconds...", self.cull_interval, @@ -585,6 +632,7 @@ async def cull_kernels(self): ) async def cull_kernel_if_idle(self, kernel_id): + """Cull a kernel if it is idle.""" kernel = 
self._kernels[kernel_id] if getattr(kernel, "execution_state", None) == "dead": @@ -597,6 +645,9 @@ async def cull_kernel_if_idle(self, kernel_id): await ensure_async(self.shutdown_kernel(kernel_id)) return + kernel_spec_metadata = kernel.kernel_spec.metadata + cull_idle_timeout = kernel_spec_metadata.get("cull_idle_timeout", self.cull_idle_timeout) + if hasattr( kernel, "last_activity" ): # last_activity is monkey-patched, so ensure that has occurred @@ -609,7 +660,7 @@ async def cull_kernel_if_idle(self, kernel_id): dt_now = utcnow() dt_idle = dt_now - kernel.last_activity # Compute idle properties - is_idle_time = dt_idle > timedelta(seconds=self.cull_idle_timeout) + is_idle_time = dt_idle > timedelta(seconds=cull_idle_timeout) is_idle_execute = self.cull_busy or (kernel.execution_state != "busy") connections = self._kernel_connections.get(kernel_id, 0) is_idle_connected = self.cull_connected or not connections @@ -629,28 +680,189 @@ async def cull_kernel_if_idle(self, kernel_id): # AsyncMappingKernelManager inherits as much as possible from MappingKernelManager, # overriding only what is different. -class AsyncMappingKernelManager(MappingKernelManager, AsyncMultiKernelManager): +class AsyncMappingKernelManager(MappingKernelManager, AsyncMultiKernelManager): # type:ignore[misc] + """An asynchronous mapping kernel manager.""" + @default("kernel_manager_class") def _default_kernel_manager_class(self): - return "jupyter_client.ioloop.AsyncIOLoopKernelManager" + return "jupyter_server.services.kernels.kernelmanager.ServerKernelManager" + + @validate("kernel_manager_class") + def _validate_kernel_manager_class(self, proposal): + """A validator for the kernel manager class.""" + km_class_value = proposal.value + km_class = import_item(km_class_value) + if not issubclass(km_class, ServerKernelManager): + warnings.warn( + f"KernelManager class '{km_class}' is not a subclass of 'ServerKernelManager'. 
Custom " + "KernelManager classes should derive from 'ServerKernelManager' beginning with jupyter-server 2.0 " + "or risk missing functionality. Continuing...", + FutureWarning, + stacklevel=3, + ) + return km_class_value def __init__(self, **kwargs): - self.pinned_superclass = AsyncMultiKernelManager + """Initialize an async mapping kernel manager.""" + self.pinned_superclass = MultiKernelManager + self._pending_kernel_tasks = {} self.pinned_superclass.__init__(self, **kwargs) self.last_kernel_activity = utcnow() - async def shutdown_kernel(self, kernel_id, now=False, restart=False): - """Shutdown a kernel by kernel_id""" - self._check_kernel_id(kernel_id) - self.stop_watching_activity(kernel_id) - self.stop_buffering(kernel_id) - # Decrease the metric of number of kernels - # running for the relevant kernel type by 1 - KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec() +def emit_kernel_action_event(success_msg: str = "") -> t.Callable[..., t.Any]: + """Decorate kernel action methods to + begin emitting jupyter kernel action events. + + Parameters + ---------- + success_msg: str + A formattable string that's passed to the message field of + the emitted event when the action succeeds. You can include + the kernel_id, kernel_name, or action in the message using + a formatted string argument, + e.g. "{kernel_id} succeeded to {action}." + + error_msg: str + A formattable string that's passed to the message field of + the emitted event when the action fails. You can include + the kernel_id, kernel_name, or action in the message using + a formatted string argument, + e.g. "{kernel_id} failed to {action}." + """ - # Finish shutting down the kernel before clearing state to avoid a race condition. 
- ret = await self.pinned_superclass.shutdown_kernel( - self, kernel_id, now=now, restart=restart - ) - return ret + def wrap_method(method): + @wraps(method) + async def wrapped_method(self, *args, **kwargs): + """""" + # Get the method name from the + action = method.__name__.replace("_kernel", "") + # If the method succeeds, emit a success event. + try: + out = await method(self, *args, **kwargs) + data = { + "kernel_name": self.kernel_name, + "action": action, + "status": "success", + "msg": success_msg.format( + kernel_id=self.kernel_id, kernel_name=self.kernel_name, action=action + ), + } + if self.kernel_id: + data["kernel_id"] = self.kernel_id + self.emit( + schema_id="https://events.jupyter.org/jupyter_server/kernel_actions/v1", + data=data, + ) + return out + # If the method fails, emit a failed event. + except Exception as err: + data = { + "kernel_name": self.kernel_name, + "action": action, + "status": "error", + "msg": str(err), + } + if self.kernel_id: + data["kernel_id"] = self.kernel_id + # If the exception is an HTTPError (usually via a gateway request) + # log the status_code and HTTPError log_message. + if isinstance(err, web.HTTPError): + msg = err.log_message or "" + data["status_code"] = err.status_code + data["msg"] = msg + self.emit( + schema_id="https://events.jupyter.org/jupyter_server/kernel_actions/v1", + data=data, + ) + raise err + + return wrapped_method + + return wrap_method + + +class ServerKernelManager(AsyncIOLoopKernelManager): + """A server-specific kernel manager.""" + + # Define activity-related attributes: + execution_state = Unicode( + None, allow_none=True, help="The current execution state of the kernel" + ) + reason = Unicode("", help="The reason for the last failure against the kernel") + + last_activity = Instance(datetime, help="The last activity on the kernel") + + # A list of pathlib objects, each pointing at an event + # schema to register with this kernel manager's eventlogger. 
+ # This trait should not be overridden. + @property + def core_event_schema_paths(self) -> list[pathlib.Path]: + return [DEFAULT_EVENTS_SCHEMA_PATH / "kernel_actions" / "v1.yaml"] + + # This trait is intended for subclasses to override and define + # custom event schemas. + extra_event_schema_paths: List[str] = List( + default_value=[], + help=""" + A list of pathlib.Path objects pointing at to register with + the kernel manager's eventlogger. + """, + ).tag(config=True) + + event_logger = Instance(EventLogger) + + @default("event_logger") + def _default_event_logger(self): + """Initialize the logger and ensure all required events are present.""" + if ( + self.parent is not None + and self.parent.parent is not None + and hasattr(self.parent.parent, "event_logger") + ): + logger = self.parent.parent.event_logger + else: + # If parent does not have an event logger, create one. + logger = EventLogger() + # Ensure that all the expected schemas are registered. If not, register them. + schemas = self.core_event_schema_paths + self.extra_event_schema_paths + for schema_path in schemas: + # Try registering the event. + try: + logger.register_event_schema(schema_path) + # Pass if it already exists. 
+ except SchemaRegistryException: + pass + return logger + + def emit(self, schema_id, data): + """Emit an event from the kernel manager.""" + self.event_logger.emit(schema_id=schema_id, data=data) + + @overrides + @emit_kernel_action_event( + success_msg="Kernel {kernel_id} was started.", + ) + async def start_kernel(self, *args, **kwargs): + return await super().start_kernel(*args, **kwargs) + + @overrides + @emit_kernel_action_event( + success_msg="Kernel {kernel_id} was shutdown.", + ) + async def shutdown_kernel(self, *args, **kwargs): + return await super().shutdown_kernel(*args, **kwargs) + + @overrides + @emit_kernel_action_event( + success_msg="Kernel {kernel_id} was restarted.", + ) + async def restart_kernel(self, *args, **kwargs): + return await super().restart_kernel(*args, **kwargs) + + @overrides + @emit_kernel_action_event( + success_msg="Kernel {kernel_id} was interrupted.", + ) + async def interrupt_kernel(self, *args, **kwargs): + return await super().interrupt_kernel(*args, **kwargs) diff --git a/jupyter_server/services/kernels/websocket.py b/jupyter_server/services/kernels/websocket.py new file mode 100644 index 0000000000..7473e2f320 --- /dev/null +++ b/jupyter_server/services/kernels/websocket.py @@ -0,0 +1,93 @@ +"""Tornado handlers for WebSocket <-> ZMQ sockets.""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. 
+ +from tornado import web +from tornado.websocket import WebSocketHandler + +from jupyter_server.base.handlers import JupyterHandler +from jupyter_server.base.websocket import WebSocketMixin + +AUTH_RESOURCE = "kernels" + + +class KernelWebsocketHandler(WebSocketMixin, WebSocketHandler, JupyterHandler): # type:ignore[misc] + """The kernels websocket should connect""" + + auth_resource = AUTH_RESOURCE + + @property + def kernel_websocket_connection_class(self): + """The kernel websocket connection class.""" + return self.settings.get("kernel_websocket_connection_class") + + def set_default_headers(self): + """Undo the set_default_headers in JupyterHandler + + which doesn't make sense for websockets + """ + + def get_compression_options(self): + """Get the socket connection options.""" + return self.settings.get("websocket_compression_options", None) + + async def pre_get(self): + """Handle a pre_get.""" + # authenticate first + user = self.current_user + if user is None: + self.log.warning("Couldn't authenticate WebSocket connection") + raise web.HTTPError(403) + + # authorize the user. + if not self.authorizer.is_authorized(self, user, "execute", "kernels"): + raise web.HTTPError(403) + + kernel = self.kernel_manager.get_kernel(self.kernel_id) + self.connection = self.kernel_websocket_connection_class( + parent=kernel, websocket_handler=self, config=self.config + ) + + if self.get_argument("session_id", None): + self.connection.session.session = self.get_argument("session_id") + else: + self.log.warning("No session ID specified") + # For backwards compatibility with older versions + # of the websocket connection, call a prepare method if found. 
+ if hasattr(self.connection, "prepare"): + await self.connection.prepare() + + async def get(self, kernel_id): + """Handle a get request for a kernel.""" + self.kernel_id = kernel_id + await self.pre_get() + await super().get(kernel_id=kernel_id) + + async def open(self, kernel_id): + """Open a kernel websocket.""" + # Need to call super here to make sure we + # begin a ping-pong loop with the client. + super().open() + # Wait for the kernel to emit an idle status. + self.log.info(f"Connecting to kernel {self.kernel_id}.") + await self.connection.connect() + + def on_message(self, ws_message): + """Get a kernel message from the websocket and turn it into a ZMQ message.""" + self.connection.handle_incoming_message(ws_message) + + def on_close(self): + """Handle a socket closure.""" + self.connection.disconnect() + self.connection = None + + def select_subprotocol(self, subprotocols): + """Select the sub protocol for the socket.""" + preferred_protocol = self.connection.kernel_ws_protocol + if preferred_protocol is None: + preferred_protocol = "v1.kernel.websocket.jupyter.org" + elif preferred_protocol == "": + preferred_protocol = None + selected_subprotocol = preferred_protocol if preferred_protocol in subprotocols else None + # None is the default, "legacy" protocol + return selected_subprotocol diff --git a/jupyter_server/services/kernelspecs/handlers.py b/jupyter_server/services/kernelspecs/handlers.py index 6cd5d9dcba..049b58fa83 100644 --- a/jupyter_server/services/kernelspecs/handlers.py +++ b/jupyter_server/services/kernelspecs/handlers.py @@ -4,18 +4,22 @@ """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
+from __future__ import annotations + import glob import json import os +from typing import Any pjoin = os.path.join +from jupyter_core.utils import ensure_async from tornado import web -from jupyter_server.auth import authorized +from jupyter_server.auth.decorator import authorized from ...base.handlers import APIHandler -from ...utils import ensure_async, url_path_join, url_unescape +from ...utils import url_path_join, url_unescape AUTH_RESOURCE = "kernelspecs" @@ -25,7 +29,6 @@ def kernelspec_model(handler, name, spec_dict, resource_dir): d = {"name": name, "spec": spec_dict, "resources": {}} # Add resource files if they exist - resource_dir = resource_dir for resource in ["kernel.js", "kernel.css"]: if os.path.exists(pjoin(resource_dir, resource)): d["resources"][resource] = url_path_join( @@ -49,16 +52,21 @@ def is_kernelspec_model(spec_dict): class KernelSpecsAPIHandler(APIHandler): + """A kernel spec API handler.""" + auth_resource = AUTH_RESOURCE class MainKernelSpecHandler(KernelSpecsAPIHandler): + """The root kernel spec handler.""" + @web.authenticated @authorized async def get(self): + """Get the list of kernel specs.""" ksm = self.kernel_spec_manager km = self.kernel_manager - model = {} + model: dict[str, Any] = {} model["default"] = km.default_kernel_name model["kernelspecs"] = specs = {} kspecs = await ensure_async(ksm.get_all_specs()) @@ -82,9 +90,12 @@ async def get(self): class KernelSpecHandler(KernelSpecsAPIHandler): + """A handler for an individual kernel spec.""" + @web.authenticated @authorized async def get(self, kernel_name): + """Get a kernel spec model.""" ksm = self.kernel_spec_manager kernel_name = url_unescape(kernel_name) try: diff --git a/jupyter_server/services/nbconvert/handlers.py b/jupyter_server/services/nbconvert/handlers.py index 6cb979bd4c..bc0d38b9f9 100644 --- a/jupyter_server/services/nbconvert/handlers.py +++ b/jupyter_server/services/nbconvert/handlers.py @@ -1,25 +1,35 @@ +"""API Handlers for nbconvert.""" import 
asyncio import json from anyio.to_thread import run_sync from tornado import web -from jupyter_server.auth import authorized +from jupyter_server.auth.decorator import authorized from ...base.handlers import APIHandler AUTH_RESOURCE = "nbconvert" -LOCK = asyncio.Lock() - - class NbconvertRootHandler(APIHandler): + """The nbconvert root API handler.""" + auth_resource = AUTH_RESOURCE + _exporter_lock: asyncio.Lock + + def initialize(self, **kwargs): + """Initialize an nbconvert root handler.""" + super().initialize(**kwargs) + # share lock across instances of this handler class + if not hasattr(self.__class__, "_exporter_lock"): + self.__class__._exporter_lock = asyncio.Lock() + self._exporter_lock = self.__class__._exporter_lock @web.authenticated @authorized async def get(self): + """Get the list of nbconvert exporters.""" try: from nbconvert.exporters import base except ImportError as e: @@ -28,22 +38,22 @@ async def get(self): # Some exporters use the filesystem when instantiating, delegate that # to a thread so we don't block the event loop for it. exporters = await run_sync(base.get_export_names) - for exporter_name in exporters: - try: - async with LOCK: + async with self._exporter_lock: + for exporter_name in exporters: + try: exporter_class = await run_sync(base.get_exporter, exporter_name) - except ValueError: - # I think the only way this will happen is if the entrypoint - # is uninstalled while this method is running - continue - # XXX: According to the docs, it looks like this should be set to None - # if the exporter shouldn't be exposed to the front-end and a friendly - # name if it should. However, none of the built-in exports have it defined. 
- # if not exporter_class.export_from_notebook: - # continue - res[exporter_name] = { - "output_mimetype": exporter_class.output_mimetype, - } + except ValueError: + # I think the only way this will happen is if the entrypoint + # is uninstalled while this method is running + continue + # XXX: According to the docs, it looks like this should be set to None + # if the exporter shouldn't be exposed to the front-end and a friendly + # name if it should. However, none of the built-in exports have it defined. + # if not exporter_class.export_from_notebook: + # continue + res[exporter_name] = { + "output_mimetype": exporter_class.output_mimetype, + } self.finish(json.dumps(res)) diff --git a/jupyter_server/services/security/handlers.py b/jupyter_server/services/security/handlers.py index 221f27302c..2e248c3e70 100644 --- a/jupyter_server/services/security/handlers.py +++ b/jupyter_server/services/security/handlers.py @@ -3,7 +3,7 @@ # Distributed under the terms of the Modified BSD License. from tornado import web -from jupyter_server.auth import authorized +from jupyter_server.auth.decorator import authorized from ...base.handlers import APIHandler from . 
import csp_report_uri @@ -22,7 +22,7 @@ def skip_check_origin(self): return True def check_xsrf_cookie(self): - # don't check XSRF for CSP reports + """Don't check XSRF for CSP reports.""" return @web.authenticated diff --git a/jupyter_server/services/sessions/handlers.py b/jupyter_server/services/sessions/handlers.py index 04a71ce4b7..73e3deb420 100644 --- a/jupyter_server/services/sessions/handlers.py +++ b/jupyter_server/services/sessions/handlers.py @@ -13,10 +13,11 @@ from jupyter_client.jsonutil import date_default as json_default from jupyter_client.kernelspec import NoSuchKernel +from jupyter_core.utils import ensure_async from tornado import web -from jupyter_server.auth import authorized -from jupyter_server.utils import ensure_async, url_path_join +from jupyter_server.auth.decorator import authorized +from jupyter_server.utils import url_path_join from ...base.handlers import APIHandler @@ -24,14 +25,18 @@ class SessionsAPIHandler(APIHandler): + """A Sessions API handler.""" + auth_resource = AUTH_RESOURCE class SessionRootHandler(SessionsAPIHandler): + """A Session Root API handler.""" + @web.authenticated @authorized async def get(self): - # Return a list of running sessions + """Get a list of running sessions.""" sm = self.session_manager sessions = await ensure_async(sm.list_sessions()) self.finish(json.dumps(sessions, default=json_default)) @@ -39,7 +44,7 @@ async def get(self): @web.authenticated @authorized async def post(self): - # Creates a new session + """Create a new session.""" # (unless a session already exists for the named session) sm = self.session_manager @@ -47,12 +52,17 @@ async def post(self): if model is None: raise web.HTTPError(400, "No JSON data provided") - if "notebook" in model and "path" in model["notebook"]: + if "notebook" in model: self.log.warning("Sessions API changed, see updated swagger docs") - model["path"] = model["notebook"]["path"] model["type"] = "notebook" + if "name" in model["notebook"]: + model["path"] = 
model["notebook"]["name"] + elif "path" in model["notebook"]: + model["path"] = model["notebook"]["path"] try: + # There is a high chance here that `path` is not a path but + # a unique session id path = model["path"] except KeyError as e: raise web.HTTPError(400, "Missing field in JSON data: path") from e @@ -73,10 +83,10 @@ async def post(self): exists = await ensure_async(sm.session_exists(path=path)) if exists: - model = await sm.get_session(path=path) + s_model = await sm.get_session(path=path) else: try: - model = await sm.create_session( + s_model = await sm.create_session( path=path, kernel_name=kernel_name, kernel_id=kernel_id, @@ -91,22 +101,24 @@ async def post(self): status_msg = "%s not found" % kernel_name self.log.warning("Kernel not found: %s" % kernel_name) self.set_status(501) - self.finish(json.dumps(dict(message=msg, short_message=status_msg))) + self.finish(json.dumps({"message": msg, "short_message": status_msg})) return except Exception as e: raise web.HTTPError(500, str(e)) from e - location = url_path_join(self.base_url, "api", "sessions", model["id"]) + location = url_path_join(self.base_url, "api", "sessions", s_model["id"]) self.set_header("Location", location) self.set_status(201) - self.finish(json.dumps(model, default=json_default)) + self.finish(json.dumps(s_model, default=json_default)) class SessionHandler(SessionsAPIHandler): + """A handler for a single session.""" + @web.authenticated @authorized async def get(self, session_id): - # Returns the JSON model for a single session + """Get the JSON model for a single session.""" sm = self.session_manager model = await sm.get_session(session_id=session_id) self.finish(json.dumps(model, default=json_default)) @@ -158,21 +170,21 @@ async def patch(self, session_id): changes["kernel_id"] = kernel_id await sm.update_session(session_id, **changes) - model = await sm.get_session(session_id=session_id) + s_model = await sm.get_session(session_id=session_id) - if model["kernel"]["id"] != 
before["kernel"]["id"]: + if s_model["kernel"]["id"] != before["kernel"]["id"]: # kernel_id changed because we got a new kernel # shutdown the old one fut = asyncio.ensure_future(ensure_async(km.shutdown_kernel(before["kernel"]["id"]))) # If we are not using pending kernels, wait for the kernel to shut down if not getattr(km, "use_pending_kernels", None): await fut - self.finish(json.dumps(model, default=json_default)) + self.finish(json.dumps(s_model, default=json_default)) @web.authenticated @authorized async def delete(self, session_id): - # Deletes the session with given session_id + """Delete the session with given session_id.""" sm = self.session_manager try: await sm.delete_session(session_id) diff --git a/jupyter_server/services/sessions/sessionmanager.py b/jupyter_server/services/sessions/sessionmanager.py index 5ea14af5ac..5f3a19c71d 100644 --- a/jupyter_server/services/sessions/sessionmanager.py +++ b/jupyter_server/services/sessions/sessionmanager.py @@ -1,24 +1,29 @@ """A base class session manager.""" + # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
+import os import pathlib import uuid +from typing import Any, Dict, List, NewType, Optional, Union, cast + +KernelName = NewType("KernelName", str) +ModelName = NewType("ModelName", str) try: import sqlite3 except ImportError: # fallback on pysqlite2 if Python was build without sqlite - from pysqlite2 import dbapi2 as sqlite3 + from pysqlite2 import dbapi2 as sqlite3 # type:ignore[no-redef] from dataclasses import dataclass, fields -from typing import Union +from jupyter_core.utils import ensure_async from tornado import web from traitlets import Instance, TraitError, Unicode, validate from traitlets.config.configurable import LoggingConfigurable from jupyter_server.traittypes import InstanceFromClasses -from jupyter_server.utils import ensure_async class KernelSessionRecordConflict(Exception): @@ -26,8 +31,6 @@ class KernelSessionRecordConflict(Exception): merge because of conflicting data. """ - pass - @dataclass class KernelSessionRecord: @@ -38,10 +41,11 @@ class KernelSessionRecord: associated with them. """ - session_id: Union[None, str] = None - kernel_id: Union[None, str] = None + session_id: Optional[str] = None + kernel_id: Optional[str] = None - def __eq__(self, other: "KernelSessionRecord") -> bool: + def __eq__(self, other: object) -> bool: + """Whether a record equals another.""" if isinstance(other, KernelSessionRecord): condition1 = self.kernel_id and self.kernel_id == other.kernel_id condition2 = all( @@ -62,24 +66,25 @@ def __eq__(self, other: "KernelSessionRecord") -> bool: self.kernel_id != other.kernel_id, ] ): - raise KernelSessionRecordConflict( + msg = ( "A single session_id can only have one kernel_id " "associated with. These two KernelSessionRecords share the same " "session_id but have different kernel_ids. This should " "not be possible and is likely an issue with the session " "records." 
) + raise KernelSessionRecordConflict(msg) return False def update(self, other: "KernelSessionRecord") -> None: """Updates in-place a kernel from other (only accepts positive updates""" if not isinstance(other, KernelSessionRecord): - raise TypeError("'other' must be an instance of KernelSessionRecord.") + msg = "'other' must be an instance of KernelSessionRecord." # type:ignore[unreachable] + raise TypeError(msg) if other.kernel_id and self.kernel_id and other.kernel_id != self.kernel_id: - raise KernelSessionRecordConflict( - "Could not update the record from 'other' because the two records conflict." - ) + msg = "Could not update the record from 'other' because the two records conflict." + raise KernelSessionRecordConflict(msg) for field in fields(self): if hasattr(other, field.name) and getattr(other, field.name): @@ -95,15 +100,19 @@ class KernelSessionRecordList: it will be appended. """ - def __init__(self, *records): + _records: List[KernelSessionRecord] + + def __init__(self, *records: KernelSessionRecord): + """Initialize a record list.""" self._records = [] for record in records: self.update(record) def __str__(self): + """The string representation of a record list.""" return str(self._records) - def __contains__(self, record: Union[KernelSessionRecord, str]): + def __contains__(self, record: Union[KernelSessionRecord, str]) -> bool: """Search for records by kernel_id and session_id""" if isinstance(record, KernelSessionRecord) and record in self._records: return True @@ -115,6 +124,7 @@ def __contains__(self, record: Union[KernelSessionRecord, str]): return False def __len__(self): + """The length of the record list.""" return len(self._records) def get(self, record: Union[KernelSessionRecord, str]) -> KernelSessionRecord: @@ -123,13 +133,14 @@ def get(self, record: Union[KernelSessionRecord, str]) -> KernelSessionRecord: """ if isinstance(record, str): for r in self._records: - if record == r.kernel_id or record == r.session_id: + if record in 
(r.kernel_id, r.session_id): return r elif isinstance(record, KernelSessionRecord): for r in self._records: if record == r: return record - raise ValueError(f"{record} not found in KernelSessionRecordList.") + msg = f"{record} not found in KernelSessionRecordList." + raise ValueError(msg) def update(self, record: KernelSessionRecord) -> None: """Update a record in-place or append it if not in the list.""" @@ -148,6 +159,7 @@ def remove(self, record: KernelSessionRecord) -> None: class SessionManager(LoggingConfigurable): + """A session manager.""" database_filepath = Unicode( default_value=":memory:", @@ -161,6 +173,7 @@ class SessionManager(LoggingConfigurable): @validate("database_filepath") def _validate_database_filepath(self, proposal): + """Validate a database file path.""" value = proposal["value"] if value == ":memory:": return value @@ -168,15 +181,15 @@ def _validate_database_filepath(self, proposal): if path.exists(): # Verify that the database path is not a directory. if path.is_dir(): - raise TraitError( - "`database_filepath` expected a file path, but the given path is a directory." - ) + msg = "`database_filepath` expected a file path, but the given path is a directory." + raise TraitError(msg) # Verify that database path is an SQLite 3 Database by checking its header. with open(value, "rb") as f: header = f.read(100) - if not header.startswith(b"SQLite format 3") and not header == b"": - raise TraitError("The given file is not an SQLite database file.") + if not header.startswith(b"SQLite format 3") and header != b"": + msg = "The given file is not an SQLite database file." 
+ raise TraitError(msg) return value kernel_manager = Instance("jupyter_server.services.kernels.kernelmanager.MappingKernelManager") @@ -188,6 +201,7 @@ def _validate_database_filepath(self, proposal): ) def __init__(self, *args, **kwargs): + """Initialize a record list.""" super().__init__(*args, **kwargs) self._pending_sessions = KernelSessionRecordList() @@ -242,14 +256,26 @@ async def session_exists(self, path): exists = True return exists - def new_session_id(self): - "Create a uuid for a new session" + def new_session_id(self) -> str: + """Create a uuid for a new session""" return str(uuid.uuid4()) async def create_session( - self, path=None, name=None, type=None, kernel_name=None, kernel_id=None - ): - """Creates a session and returns its model""" + self, + path: Optional[str] = None, + name: Optional[ModelName] = None, + type: Optional[str] = None, + kernel_name: Optional[KernelName] = None, + kernel_id: Optional[str] = None, + ) -> Dict[str, Any]: + """Creates a session and returns its model + + Parameters + ---------- + name: ModelName(str) + Usually the model name, like the filename associated with current + kernel. + """ session_id = self.new_session_id() record = KernelSessionRecord(session_id=session_id) self._pending_sessions.update(record) @@ -265,16 +291,61 @@ async def create_session( session_id, path=path, name=name, type=type, kernel_id=kernel_id ) self._pending_sessions.remove(record) - return result + return cast(Dict[str, Any], result) - async def start_kernel_for_session(self, session_id, path, name, type, kernel_name): - """Start a new kernel for a given session.""" + def get_kernel_env( + self, path: Optional[str], name: Optional[ModelName] = None + ) -> Dict[str, str]: + """Return the environment variables that need to be set in the kernel + + Parameters + ---------- + path : str + the url path for the given session. 
+ name: ModelName(str), optional + Here the name is likely to be the name of the associated file + with the current kernel at startup time. + """ + if name is not None: + cwd = self.kernel_manager.cwd_for_path(path) + path = os.path.join(cwd, name) + assert isinstance(path, str) + return {**os.environ, "JPY_SESSION_NAME": path} + + async def start_kernel_for_session( + self, + session_id: str, + path: Optional[str], + name: Optional[ModelName], + type: Optional[str], + kernel_name: Optional[KernelName], + ) -> str: + """Start a new kernel for a given session. + + Parameters + ---------- + session_id : str + uuid for the session; this method must be given a session_id + path : str + the path for the given session - seem to be a session id sometime. + name : str + Usually the model name, like the filename associated with current + kernel. + type : str + the type of the session + kernel_name : str + the name of the kernel specification to use. The default kernel name will be used if not provided. 
+ """ # allow contents manager to specify kernels cwd - kernel_path = self.contents_manager.get_kernel_path(path=path) + kernel_path = await ensure_async(self.contents_manager.get_kernel_path(path=path)) + + kernel_env = self.get_kernel_env(path, name) kernel_id = await self.kernel_manager.start_kernel( - path=kernel_path, kernel_name=kernel_name + path=kernel_path, + kernel_name=kernel_name, + env=kernel_env, ) - return kernel_id + return cast(str, kernel_id) async def save_session(self, session_id, path=None, name=None, type=None, kernel_id=None): """Saves the items for the session with the given session_id @@ -289,9 +360,9 @@ async def save_session(self, session_id, path=None, name=None, type=None, kernel uuid for the session; this method must be given a session_id path : str the path for the given session - name: str + name : str the name of the session - type: string + type : str the type of the session kernel_id : str a uuid for the kernel associated with this session @@ -316,7 +387,7 @@ async def get_session(self, **kwargs): Parameters ---------- - **kwargs : keyword argument + **kwargs : dict must be given one of the keywords and values from the session database (i.e. session_id, path, name, type, kernel_id) @@ -327,12 +398,14 @@ async def get_session(self, **kwargs): session described by the kwarg. """ if not kwargs: - raise TypeError("must specify a column to query") + msg = "must specify a column to query" + raise TypeError(msg) conditions = [] - for column in kwargs.keys(): + for column in kwargs: if column not in self._columns: - raise TypeError("No such column: %r", column) + msg = f"No such column: {column}" + raise TypeError(msg) conditions.append("%s=?" 
% column) query = "SELECT * FROM session WHERE %s" % (" AND ".join(conditions)) @@ -354,7 +427,7 @@ async def get_session(self, **kwargs): try: model = await self.row_to_model(row) except KeyError as e: - raise web.HTTPError(404, "Session not found: %s" % str(e)) + raise web.HTTPError(404, "Session not found: %s" % str(e)) from e return model async def update_session(self, session_id, **kwargs): @@ -379,20 +452,27 @@ async def update_session(self, session_id, **kwargs): return sets = [] - for column in kwargs.keys(): + for column in kwargs: if column not in self._columns: raise TypeError("No such column: %r" % column) sets.append("%s=?" % column) query = "UPDATE session SET %s WHERE session_id=?" % (", ".join(sets)) - self.cursor.execute(query, list(kwargs.values()) + [session_id]) + self.cursor.execute(query, [*list(kwargs.values()), session_id]) + + if hasattr(self.kernel_manager, "update_env"): + self.cursor.execute( + "SELECT path, name, kernel_id FROM session WHERE session_id=?", [session_id] + ) + path, name, kernel_id = self.cursor.fetchone() + self.kernel_manager.update_env(kernel_id=kernel_id, env=self.get_kernel_env(path, name)) - def kernel_culled(self, kernel_id): + async def kernel_culled(self, kernel_id: str) -> bool: """Checks if the kernel is still considered alive and returns true if its not found.""" return kernel_id not in self.kernel_manager async def row_to_model(self, row, tolerate_culled=False): """Takes sqlite database session row and turns it into a dictionary""" - kernel_culled = await ensure_async(self.kernel_culled(row["kernel_id"])) + kernel_culled: bool = await ensure_async(self.kernel_culled(row["kernel_id"])) if kernel_culled: # The kernel was culled or died without deleting the session. 
# We can't use delete_session here because that tries to find @@ -410,7 +490,7 @@ async def row_to_model(self, row, tolerate_culled=False): ) if tolerate_culled: self.log.warning(f"{msg} Continuing...") - return + return None raise KeyError(msg) kernel_model = await ensure_async(self.kernel_manager.kernel_model(row["kernel_id"])) diff --git a/jupyter_server/services/shutdown.py b/jupyter_server/services/shutdown.py index c8ad40fa57..a8c6787f0e 100644 --- a/jupyter_server/services/shutdown.py +++ b/jupyter_server/services/shutdown.py @@ -2,21 +2,25 @@ """ from tornado import ioloop, web -from jupyter_server.auth import authorized +from jupyter_server.auth.decorator import authorized from jupyter_server.base.handlers import JupyterHandler AUTH_RESOURCE = "server" class ShutdownHandler(JupyterHandler): + """A shutdown API handler.""" + auth_resource = AUTH_RESOURCE @web.authenticated @authorized async def post(self): + """Shut down the server.""" self.log.info("Shutting down on /api/shutdown request.") - await self.serverapp._cleanup() + if self.serverapp: + await self.serverapp._cleanup() ioloop.IOLoop.current().stop() diff --git a/jupyter_server/terminal/__init__.py b/jupyter_server/terminal/__init__.py index c8d2856087..0dd1533c6a 100644 --- a/jupyter_server/terminal/__init__.py +++ b/jupyter_server/terminal/__init__.py @@ -1,52 +1,17 @@ -import os -import sys -from shutil import which +"""Terminals support.""" +import warnings -import terminado +# Shims +from jupyter_server_terminals import api_handlers +from jupyter_server_terminals.handlers import TermSocket +from jupyter_server_terminals.terminalmanager import TerminalManager -from ..utils import check_version - -if not check_version(terminado.__version__, "0.8.3"): - raise ImportError("terminado >= 0.8.3 required, found %s" % terminado.__version__) - -from jupyter_server.utils import url_path_join as ujoin - -from . 
import api_handlers -from .handlers import TermSocket -from .terminalmanager import TerminalManager +warnings.warn( + "Terminals support has moved to `jupyter_server_terminals`", + DeprecationWarning, + stacklevel=2, +) def initialize(webapp, root_dir, connection_url, settings): - if os.name == "nt": - default_shell = "powershell.exe" - else: - default_shell = which("sh") - shell_override = settings.get("shell_command") - shell = [os.environ.get("SHELL") or default_shell] if shell_override is None else shell_override - # When the notebook server is not running in a terminal (e.g. when - # it's launched by a JupyterHub spawner), it's likely that the user - # environment hasn't been fully set up. In that case, run a login - # shell to automatically source /etc/profile and the like, unless - # the user has specifically set a preferred shell command. - if os.name != "nt" and shell_override is None and not sys.stdout.isatty(): - shell.append("-l") - terminal_manager = webapp.settings["terminal_manager"] = TerminalManager( - shell_command=shell, - extra_env={ - "JUPYTER_SERVER_ROOT": root_dir, - "JUPYTER_SERVER_URL": connection_url, - }, - parent=webapp.settings["serverapp"], - ) - terminal_manager.log = webapp.settings["serverapp"].log - base_url = webapp.settings["base_url"] - handlers = [ - ( - ujoin(base_url, r"/terminals/websocket/(\w+)"), - TermSocket, - {"term_manager": terminal_manager}, - ), - (ujoin(base_url, r"/api/terminals"), api_handlers.TerminalRootHandler), - (ujoin(base_url, r"/api/terminals/(\w+)"), api_handlers.TerminalHandler), - ] - webapp.add_handlers(".*$", handlers) + """Included for backward compat, but no-op.""" diff --git a/jupyter_server/terminal/api_handlers.py b/jupyter_server/terminal/api_handlers.py index e521dd353a..c20f2d8be3 100644 --- a/jupyter_server/terminal/api_handlers.py +++ b/jupyter_server/terminal/api_handlers.py @@ -1,69 +1,6 @@ -import json -from pathlib import Path - -from tornado import web - -from jupyter_server.auth 
import authorized - -from ..base.handlers import APIHandler - -AUTH_RESOURCE = "terminals" - - -class TerminalAPIHandler(APIHandler): - auth_resource = AUTH_RESOURCE - - -class TerminalRootHandler(TerminalAPIHandler): - @web.authenticated - @authorized - def get(self): - models = self.terminal_manager.list() - self.finish(json.dumps(models)) - - @web.authenticated - @authorized - def post(self): - """POST /terminals creates a new terminal and redirects to it""" - data = self.get_json_body() or {} - - # if cwd is a relative path, it should be relative to the root_dir, - # but if we pass it as relative, it will we be considered as relative to - # the path jupyter_server was started in - if "cwd" in data: - cwd = Path(data["cwd"]) - if not cwd.resolve().exists(): - cwd = Path(self.settings["server_root_dir"]).expanduser() / cwd - if not cwd.resolve().exists(): - cwd = None - - if cwd is None: - server_root_dir = self.settings["server_root_dir"] - self.log.debug( - f"Failed to find requested terminal cwd: {data.get('cwd')}\n" - f" It was not found within the server root neither: {server_root_dir}." 
- ) - del data["cwd"] - else: - self.log.debug(f"Opening terminal in: {cwd.resolve()!s}") - data["cwd"] = str(cwd.resolve()) - - model = self.terminal_manager.create(**data) - self.finish(json.dumps(model)) - - -class TerminalHandler(TerminalAPIHandler): - SUPPORTED_METHODS = ("GET", "DELETE") - - @web.authenticated - @authorized - def get(self, name): - model = self.terminal_manager.get(name) - self.finish(json.dumps(model)) - - @web.authenticated - @authorized - async def delete(self, name): - await self.terminal_manager.terminate(name, force=True) - self.set_status(204) - self.finish() +"""Terminal API handlers.""" +from jupyter_server_terminals.api_handlers import ( + TerminalAPIHandler, + TerminalHandler, + TerminalRootHandler, +) diff --git a/jupyter_server/terminal/handlers.py b/jupyter_server/terminal/handlers.py index fde65e4a0a..f3da8aa91c 100644 --- a/jupyter_server/terminal/handlers.py +++ b/jupyter_server/terminal/handlers.py @@ -1,55 +1,4 @@ """Tornado handlers for the terminal emulator.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -import terminado -from tornado import web - -from jupyter_server._tz import utcnow -from jupyter_server.auth.utils import warn_disabled_authorization - -from ..base.handlers import JupyterHandler -from ..base.zmqhandlers import WebSocketMixin - -AUTH_RESOURCE = "terminals" - - -class TermSocket(WebSocketMixin, JupyterHandler, terminado.TermSocket): - - auth_resource = AUTH_RESOURCE - - def origin_check(self): - """Terminado adds redundant origin_check - Tornado already calls check_origin, so don't do anything here. - """ - return True - - def get(self, *args, **kwargs): - user = self.current_user - - if not user: - raise web.HTTPError(403) - - # authorize the user. - if not self.authorizer: - # Warn if there is not authorizer. 
- warn_disabled_authorization() - elif not self.authorizer.is_authorized(self, user, "execute", self.auth_resource): - raise web.HTTPError(403) - - if not args[0] in self.term_manager.terminals: - raise web.HTTPError(404) - return super().get(*args, **kwargs) - - def on_message(self, message): - super().on_message(message) - self._update_activity() - - def write_message(self, message, binary=False): - super().write_message(message, binary=binary) - self._update_activity() - - def _update_activity(self): - self.application.settings["terminal_last_activity"] = utcnow() - # terminal may not be around on deletion/cull - if self.term_name in self.terminal_manager.terminals: - self.terminal_manager.terminals[self.term_name].last_activity = utcnow() +from jupyter_server_terminals.handlers import TermSocket diff --git a/jupyter_server/terminal/terminalmanager.py b/jupyter_server/terminal/terminalmanager.py index 4a6debe05e..6f9ada1b71 100644 --- a/jupyter_server/terminal/terminalmanager.py +++ b/jupyter_server/terminal/terminalmanager.py @@ -4,165 +4,4 @@ """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -from datetime import timedelta - -import terminado -from tornado import web -from tornado.ioloop import IOLoop, PeriodicCallback -from traitlets import Integer -from traitlets.config import LoggingConfigurable - -from jupyter_server._tz import isoformat, utcnow - -from ..prometheus.metrics import TERMINAL_CURRENTLY_RUNNING_TOTAL - - -class TerminalManager(LoggingConfigurable, terminado.NamedTermManager): - """ """ - - _culler_callback = None - - _initialized_culler = False - - cull_inactive_timeout = Integer( - 0, - config=True, - help="""Timeout (in seconds) in which a terminal has been inactive and ready to be culled. 
- Values of 0 or lower disable culling.""", - ) - - cull_interval_default = 300 # 5 minutes - cull_interval = Integer( - cull_interval_default, - config=True, - help="""The interval (in seconds) on which to check for terminals exceeding the inactive timeout value.""", - ) - - # ------------------------------------------------------------------------- - # Methods for managing terminals - # ------------------------------------------------------------------------- - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def create(self, **kwargs): - """Create a new terminal.""" - name, term = self.new_named_terminal(**kwargs) - # Monkey-patch last-activity, similar to kernels. Should we need - # more functionality per terminal, we can look into possible sub- - # classing or containment then. - term.last_activity = utcnow() - model = self.get_terminal_model(name) - # Increase the metric by one because a new terminal was created - TERMINAL_CURRENTLY_RUNNING_TOTAL.inc() - # Ensure culler is initialized - self._initialize_culler() - return model - - def get(self, name): - """Get terminal 'name'.""" - model = self.get_terminal_model(name) - return model - - def list(self): - """Get a list of all running terminals.""" - models = [self.get_terminal_model(name) for name in self.terminals] - - # Update the metric below to the length of the list 'terms' - TERMINAL_CURRENTLY_RUNNING_TOTAL.set(len(models)) - return models - - async def terminate(self, name, force=False): - """Terminate terminal 'name'.""" - self._check_terminal(name) - await super().terminate(name, force=force) - - # Decrease the metric below by one - # because a terminal has been shutdown - TERMINAL_CURRENTLY_RUNNING_TOTAL.dec() - - async def terminate_all(self): - """Terminate all terminals.""" - terms = [name for name in self.terminals] - for term in terms: - await self.terminate(term, force=True) - - def get_terminal_model(self, name): - """Return a JSON-safe dict representing a 
terminal. - For use in representing terminals in the JSON APIs. - """ - self._check_terminal(name) - term = self.terminals[name] - model = { - "name": name, - "last_activity": isoformat(term.last_activity), - } - return model - - def _check_terminal(self, name): - """Check a that terminal 'name' exists and raise 404 if not.""" - if name not in self.terminals: - raise web.HTTPError(404, "Terminal not found: %s" % name) - - def _initialize_culler(self): - """Start culler if 'cull_inactive_timeout' is greater than zero. - Regardless of that value, set flag that we've been here. - """ - if not self._initialized_culler and self.cull_inactive_timeout > 0: - if self._culler_callback is None: - _ = IOLoop.current() - if self.cull_interval <= 0: # handle case where user set invalid value - self.log.warning( - "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", - self.cull_interval, - self.cull_interval_default, - ) - self.cull_interval = self.cull_interval_default - self._culler_callback = PeriodicCallback( - self._cull_terminals, 1000 * self.cull_interval - ) - self.log.info( - "Culling terminals with inactivity > %s seconds at %s second intervals ...", - self.cull_inactive_timeout, - self.cull_interval, - ) - self._culler_callback.start() - - self._initialized_culler = True - - async def _cull_terminals(self): - self.log.debug( - "Polling every %s seconds for terminals inactive for > %s seconds...", - self.cull_interval, - self.cull_inactive_timeout, - ) - # Create a separate list of terminals to avoid conflicting updates while iterating - for name in list(self.terminals): - try: - await self._cull_inactive_terminal(name) - except Exception as e: - self.log.exception( - "The following exception was encountered while checking the " - "activity of terminal {}: {}".format(name, e) - ) - - async def _cull_inactive_terminal(self, name): - try: - term = self.terminals[name] - except KeyError: - return # KeyErrors are somewhat expected since the 
terminal can be terminated as the culling check is made. - - self.log.debug("name=%s, last_activity=%s", name, term.last_activity) - if hasattr(term, "last_activity"): - dt_now = utcnow() - dt_inactive = dt_now - term.last_activity - # Compute idle properties - is_time = dt_inactive > timedelta(seconds=self.cull_inactive_timeout) - # Cull the kernel if all three criteria are met - if is_time: - inactivity = int(dt_inactive.total_seconds()) - self.log.warning( - "Culling terminal '%s' due to %s seconds of inactivity.", - name, - inactivity, - ) - await self.terminate(name, force=True) +from jupyter_server_terminals.terminalmanager import TerminalManager diff --git a/jupyter_server/traittypes.py b/jupyter_server/traittypes.py index cad8b4e204..f17f3a0a24 100644 --- a/jupyter_server/traittypes.py +++ b/jupyter_server/traittypes.py @@ -1,13 +1,16 @@ +"""Custom trait types.""" import inspect from ast import literal_eval -from traitlets import ClassBasedTraitType, TraitError, Undefined +from traitlets import Any, ClassBasedTraitType, TraitError, Undefined from traitlets.utils.descriptions import describe -class TypeFromClasses(ClassBasedTraitType): +class TypeFromClasses(ClassBasedTraitType): # type:ignore[type-arg] """A trait whose value must be a subclass of a class in a specified list of classes.""" + default_value: Any + def __init__(self, default_value=Undefined, klasses=None, **kwargs): """Construct a Type trait A Type trait specifies that its values must be subclasses of @@ -42,11 +45,13 @@ def __init__(self, default_value=Undefined, klasses=None, **kwargs): # OneOfType requires a list of klasses to be specified (different than Type). if not isinstance(klasses, (list, tuple, set)): - raise TraitError("`klasses` must be a list of class names (type is str) or classes.") + msg = "`klasses` must be a list of class names (type is str) or classes." 
+ raise TraitError(msg) for klass in klasses: if not (inspect.isclass(klass) or isinstance(klass, str)): - raise TraitError("A OneOfType trait must specify a list of classes.") + msg = "A OneOfType trait must specify a list of classes." + raise TraitError(msg) # Store classes. self.klasses = klasses @@ -54,7 +59,7 @@ def __init__(self, default_value=Undefined, klasses=None, **kwargs): super().__init__(new_default_value, **kwargs) def subclass_from_klasses(self, value): - "Check that a given class is a subclasses found in the klasses list." + """Check that a given class is a subclasses found in the klasses list.""" return any(issubclass(value, klass) for klass in self.importable_klasses) def validate(self, obj, value): @@ -62,11 +67,12 @@ def validate(self, obj, value): if isinstance(value, str): try: value = self._resolve_string(value) - except ImportError: - raise TraitError( - "The '%s' trait of %s instance must be a type, but " - "%r could not be imported" % (self.name, obj, value) + except ImportError as e: + emsg = ( + f"The '{self.name}' trait of {obj} instance must be a type, but " + f"{value!r} could not be imported" ) + raise TraitError(emsg) from e try: if self.subclass_from_klasses(value): return value @@ -80,26 +86,27 @@ def info(self): result = "a subclass of " for klass in self.klasses: if not isinstance(klass, str): - klass = klass.__module__ + "." + klass.__name__ + klass = klass.__module__ + "." + klass.__name__ # noqa: PLW2901 result += f"{klass} or " # Strip the last "or" - result = result.strip(" or ") # noqa B005 + result = result.strip(" or ") # noqa: B005 if self.allow_none: return result + " or None" return result def instance_init(self, obj): + """Initialize an instance.""" self._resolve_classes() super().instance_init(obj) def _resolve_classes(self): - # Resolve all string names to actual classes. 
+ """Resolve all string names to actual classes.""" self.importable_klasses = [] for klass in self.klasses: if isinstance(klass, str): # Try importing the classes to compare. Silently, ignore if not importable. try: - klass = self._resolve_string(klass) + klass = self._resolve_string(klass) # noqa: PLW2901 self.importable_klasses.append(klass) except Exception: pass @@ -107,9 +114,10 @@ def _resolve_classes(self): self.importable_klasses.append(klass) if isinstance(self.default_value, str): - self.default_value = self._resolve_string(self.default_value) + self.default_value = self._resolve_string(self.default_value) # type:ignore[arg-type] def default_value_repr(self): + """The default value repr.""" value = self.default_value if isinstance(value, str): return repr(value) @@ -117,7 +125,7 @@ def default_value_repr(self): return repr(f"{value.__module__}.{value.__name__}") -class InstanceFromClasses(ClassBasedTraitType): +class InstanceFromClasses(ClassBasedTraitType): # type:ignore[type-arg] """A trait whose value must be an instance of a class in a specified list of classes. The value can also be an instance of a subclass of the specified classes. Subclasses can declare default classes by overriding the klass attribute @@ -148,7 +156,7 @@ class or its subclasses. Our implementation is quite different None, the None is replaced by ``()`` or ``{}``, respectively. """ # If class - if klasses is None: + if klasses is None: # noqa: SIM114 self.klasses = klasses # Verify all elements are either classes or strings. elif all(inspect.isclass(k) or isinstance(k, str) for k in klasses): @@ -160,9 +168,11 @@ class or its subclasses. Our implementation is quite different ) if (kw is not None) and not isinstance(kw, dict): - raise TraitError("The 'kw' argument must be a dict or None.") + msg = "The 'kw' argument must be a dict or None." 
+ raise TraitError(msg) if (args is not None) and not isinstance(args, tuple): - raise TraitError("The 'args' argument must be a tuple or None.") + msg = "The 'args' argument must be a tuple or None." + raise TraitError(msg) self.default_args = args self.default_kwargs = kw @@ -170,40 +180,45 @@ class or its subclasses. Our implementation is quite different super().__init__(**kwargs) def instance_from_importable_klasses(self, value): - "Check that a given class is a subclasses found in the klasses list." + """Check that a given class is a subclasses found in the klasses list.""" return any(isinstance(value, klass) for klass in self.importable_klasses) def validate(self, obj, value): + """Validate an instance.""" if self.instance_from_importable_klasses(value): return value else: self.error(obj, value) def info(self): + """Get the trait info.""" result = "an instance of " + assert self.klasses is not None for klass in self.klasses: if isinstance(klass, str): result += klass else: result += describe("a", klass) result += " or " - result = result.strip(" or ") # noqa B005 + result = result.strip(" or ") # noqa: B005 if self.allow_none: result += " or None" return result def instance_init(self, obj): + """Initialize the trait.""" self._resolve_classes() super().instance_init(obj) def _resolve_classes(self): - # Resolve all string names to actual classes. + """Resolve all string names to actual classes.""" self.importable_klasses = [] + assert self.klasses is not None for klass in self.klasses: if isinstance(klass, str): # Try importing the classes to compare. Silently, ignore if not importable. 
try: - klass = self._resolve_string(klass) + klass = self._resolve_string(klass) # noqa: PLW2901 self.importable_klasses.append(klass) except Exception: pass @@ -211,12 +226,17 @@ def _resolve_classes(self): self.importable_klasses.append(klass) def make_dynamic_default(self): + """Make the dynamic default for the trait.""" if (self.default_args is None) and (self.default_kwargs is None): return None - return self.klass(*(self.default_args or ()), **(self.default_kwargs or {})) + return self.klass( # type:ignore[attr-defined] + *(self.default_args or ()), **(self.default_kwargs or {}) + ) def default_value_repr(self): + """Get the default value repr.""" return repr(self.make_dynamic_default()) def from_string(self, s): + """Convert from a string.""" return literal_eval(s) diff --git a/jupyter_server/transutils.py b/jupyter_server/transutils.py index 2ca30e437d..f63e1e1d80 100644 --- a/jupyter_server/transutils.py +++ b/jupyter_server/transutils.py @@ -7,6 +7,7 @@ def _trans_gettext_deprecation_helper(*args, **kwargs): + """The trans gettext deprecation helper.""" warn_msg = "The alias `_()` will be deprecated. Use `_i18n()` instead." warnings.warn(warn_msg, FutureWarning, stacklevel=2) return trans.gettext(*args, **kwargs) diff --git a/jupyter_server/utils.py b/jupyter_server/utils.py index c7eb9a71f5..2a4c185d97 100644 --- a/jupyter_server/utils.py +++ b/jupyter_server/utils.py @@ -1,25 +1,44 @@ """Notebook related utilities""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
-import asyncio +from __future__ import annotations + import errno import importlib.util -import inspect import os import socket import sys +import warnings from contextlib import contextmanager -from urllib.parse import urljoin # noqa: F401 -from urllib.parse import SplitResult, quote, unquote, urlparse, urlsplit, urlunsplit -from urllib.request import pathname2url # noqa: F401 +from typing import Any, Generator, NewType, Sequence +from urllib.parse import ( + SplitResult, + quote, + unquote, + urlparse, + urlsplit, + urlunsplit, +) +from urllib.parse import ( + urljoin as _urljoin, +) +from urllib.request import pathname2url as _pathname2url from _frozen_importlib_external import _NamespacePath +from jupyter_core.utils import ensure_async as _ensure_async from packaging.version import Version -from tornado.httpclient import AsyncHTTPClient, HTTPClient, HTTPRequest +from tornado.httpclient import AsyncHTTPClient, HTTPClient, HTTPRequest, HTTPResponse from tornado.netutil import Resolver +ApiPath = NewType("ApiPath", str) + +# Re-export +urljoin = _urljoin +pathname2url = _pathname2url +ensure_async = _ensure_async + -def url_path_join(*pieces): +def url_path_join(*pieces: str) -> str: """Join components of url into a relative url Use to prevent double slash when joining subpath. 
This will leave the @@ -38,12 +57,12 @@ def url_path_join(*pieces): return result -def url_is_absolute(url): +def url_is_absolute(url: str) -> bool: """Determine whether a given URL is absolute""" return urlparse(url).path.startswith("/") -def path2url(path): +def path2url(path: str) -> str: """Convert a local file path to a URL""" pieces = [quote(p) for p in path.split(os.sep)] # preserve trailing / @@ -53,14 +72,14 @@ def path2url(path): return url -def url2path(url): +def url2path(url: str) -> str: """Convert a URL to a local file path""" pieces = [unquote(p) for p in url.split("/")] path = os.path.join(*pieces) return path -def url_escape(path): +def url_escape(path: str) -> str: """Escape special characters in a URL path Turns '/foo bar/' into '/foo%20bar/' @@ -69,7 +88,7 @@ def url_escape(path): return "/".join([quote(p) for p in parts]) -def url_unescape(path): +def url_unescape(path: str) -> str: """Unescape special characters in a URL path Turns '/foo%20bar/' into '/foo bar/' @@ -77,7 +96,7 @@ def url_unescape(path): return "/".join([unquote(p) for p in path.split("/")]) -def samefile_simple(path, other_path): +def samefile_simple(path: str, other_path: str) -> bool: """ Fill in for os.path.samefile when it is unavailable (Windows+py2). @@ -90,8 +109,10 @@ def samefile_simple(path, other_path): Parameters ---------- - path : String representing a path to a file - other_path : String representing a path to another file + path : str + representing a path to a file + other_path : str + representing a path to another file Returns ------- @@ -102,19 +123,19 @@ def samefile_simple(path, other_path): return path.lower() == other_path.lower() and path_stat == other_path_stat -def to_os_path(path, root=""): +def to_os_path(path: ApiPath, root: str = "") -> str: """Convert an API path to a filesystem path If given, root will be prepended to the path. root must be a filesystem path already. 
""" - parts = path.strip("/").split("/") - parts = [p for p in parts if p != ""] # remove duplicate splits - path = os.path.join(root, *parts) - return path + parts = str(path).strip("/").split("/") + parts = [p for p in parts if p != ""] # remove duplicate splits + path_ = os.path.join(root, *parts) + return os.path.normpath(path_) -def to_api_path(os_path, root=""): +def to_api_path(os_path: str, root: str = "") -> ApiPath: """Convert a filesystem path to an API path If given, root will be removed from the path. @@ -125,10 +146,10 @@ def to_api_path(os_path, root=""): parts = os_path.strip(os.path.sep).split(os.path.sep) parts = [p for p in parts if p != ""] # remove duplicate splits path = "/".join(parts) - return path + return ApiPath(path) -def check_version(v, check): +def check_version(v: str, check: str) -> bool: """check version string v >= check If dev/prerelease tags result in TypeError for string-number comparison, @@ -136,7 +157,7 @@ def check_version(v, check): Users on dev branches are responsible for keeping their own packages up to date. """ try: - return Version(v) >= Version(check) + return bool(Version(v) >= Version(check)) except TypeError: return True @@ -144,15 +165,15 @@ def check_version(v, check): # Copy of IPython.utils.process.check_pid: -def _check_pid_win32(pid): +def _check_pid_win32(pid: int) -> bool: import ctypes # OpenProcess returns 0 if no such process (of ours) exists # positive int otherwise - return bool(ctypes.windll.kernel32.OpenProcess(1, 0, pid)) + return bool(ctypes.windll.kernel32.OpenProcess(1, 0, pid)) # type:ignore[attr-defined] -def _check_pid_posix(pid): +def _check_pid_posix(pid: int) -> bool: """Copy of IPython.utils.process.check_pid""" try: os.kill(pid, 0) @@ -173,108 +194,32 @@ def _check_pid_posix(pid): check_pid = _check_pid_posix -async def ensure_async(obj): - """Convert a non-awaitable object to a coroutine if needed, - and await it if it was not already awaited. 
- """ - if inspect.isawaitable(obj): - try: - result = await obj - except RuntimeError as e: - if str(e) == "cannot reuse already awaited coroutine": - # obj is already the coroutine's result - return obj - raise - return result - # obj doesn't need to be awaited - return obj - - -def run_sync(maybe_async): - """If async, runs maybe_async and blocks until it has executed, - possibly creating an event loop. - If not async, just returns maybe_async as it is the result of something - that has already executed. - - Parameters - ---------- - maybe_async : async or non-async object - The object to be executed, if it is async. - - Returns - ------- - result - Whatever the async object returns, or the object itself. - """ - if not inspect.isawaitable(maybe_async): - # that was not something async, just return it - return maybe_async - # it is async, we need to run it in an event loop - - def wrapped(): - create_new_event_loop = False - try: - loop = asyncio.get_event_loop() - except RuntimeError: - create_new_event_loop = True - else: - if loop.is_closed(): - create_new_event_loop = True - if create_new_event_loop: - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - try: - result = loop.run_until_complete(maybe_async) - except RuntimeError as e: - if str(e) == "This event loop is already running": - # just return a Future, hoping that it will be awaited - result = asyncio.ensure_future(maybe_async) - else: - raise e - return result - - return wrapped() - - async def run_sync_in_loop(maybe_async): - """Runs a function synchronously whether it is an async function or not. - - If async, runs maybe_async and blocks until it has executed. - - If not async, just returns maybe_async as it is the result of something - that has already executed. - - Parameters - ---------- - maybe_async : async or non-async object - The object to be executed, if it is async. - - Returns - ------- - result - Whatever the async object returns, or the object itself. 
- """ - if not inspect.isawaitable(maybe_async): - return maybe_async - return await maybe_async + """**DEPRECATED**: Use ``ensure_async`` from jupyter_core instead.""" + warnings.warn( + "run_sync_in_loop is deprecated since Jupyter Server 2.0, use 'ensure_async' from jupyter_core instead", + DeprecationWarning, + stacklevel=2, + ) + return ensure_async(maybe_async) -def urlencode_unix_socket_path(socket_path): +def urlencode_unix_socket_path(socket_path: str) -> str: """Encodes a UNIX socket path string from a socket path for the `http+unix` URI form.""" return socket_path.replace("/", "%2F") -def urldecode_unix_socket_path(socket_path): +def urldecode_unix_socket_path(socket_path: str) -> str: """Decodes a UNIX sock path string from an encoded sock path for the `http+unix` URI form.""" return socket_path.replace("%2F", "/") -def urlencode_unix_socket(socket_path): +def urlencode_unix_socket(socket_path: str) -> str: """Encodes a UNIX socket URL from a socket path for the `http+unix` URI form.""" return "http+unix://%s" % urlencode_unix_socket_path(socket_path) -def unix_socket_in_use(socket_path): +def unix_socket_in_use(socket_path: str) -> bool: """Checks whether a UNIX socket path on disk is in use by attempting to connect to it.""" if not os.path.exists(socket_path): return False @@ -291,7 +236,9 @@ def unix_socket_in_use(socket_path): @contextmanager -def _request_for_tornado_client(urlstring, method="GET", body=None, headers=None): +def _request_for_tornado_client( + urlstring: str, method: str = "GET", body: Any = None, headers: Any = None +) -> Generator[HTTPRequest, None, None]: """A utility that provides a context that handles HTTP, HTTPS, and HTTP+UNIX request. 
Creates a tornado HTTPRequest object with a URL @@ -333,35 +280,44 @@ async def resolve(self, host, port, *args, **kwargs): resolver = UnixSocketResolver(resolver=Resolver()) AsyncHTTPClient.configure(None, resolver=resolver) else: - raise Exception("Unknown URL scheme.") + msg = "Unknown URL scheme." + raise Exception(msg) # Yield the request for the given client. url = urlunsplit(parts) - request = HTTPRequest(url, method=method, body=body, headers=headers) + request = HTTPRequest(url, method=method, body=body, headers=headers, validate_cert=False) yield request -def fetch(urlstring, method="GET", body=None, headers=None): +def fetch( + urlstring: str, method: str = "GET", body: Any = None, headers: Any = None +) -> HTTPResponse: """ Send a HTTP, HTTPS, or HTTP+UNIX request to a Tornado Web Server. Returns a tornado HTTPResponse. """ - with _request_for_tornado_client(urlstring) as request: + with _request_for_tornado_client( + urlstring, method=method, body=body, headers=headers + ) as request: response = HTTPClient(AsyncHTTPClient).fetch(request) return response -async def async_fetch(urlstring, method="GET", body=None, headers=None, io_loop=None): +async def async_fetch( + urlstring: str, method: str = "GET", body: Any = None, headers: Any = None, io_loop: Any = None +) -> HTTPResponse: """ Send an asynchronous HTTP, HTTPS, or HTTP+UNIX request to a Tornado Web Server. Returns a tornado HTTPResponse. """ - with _request_for_tornado_client(urlstring) as request: + with _request_for_tornado_client( + urlstring, method=method, body=body, headers=headers + ) as request: response = await AsyncHTTPClient(io_loop).fetch(request) return response -def is_namespace_package(namespace): +def is_namespace_package(namespace: str) -> bool | None: """Is the provided namespace a Python Namespace Package (PEP420). 
https://www.python.org/dev/peps/pep-0420/#specification @@ -381,17 +337,22 @@ def is_namespace_package(namespace): return isinstance(spec.submodule_search_locations, _NamespacePath) -def filefind(filename, path_dirs=None): +def filefind(filename: str, path_dirs: Sequence[str] | str | None = None) -> str: """Find a file by looking through a sequence of paths. This iterates through a sequence of paths looking for a file and returns - the full, absolute path of the first occurence of the file. If no set of + the full, absolute path of the first occurrence of the file. If no set of path dirs is given, the filename is tested as is, after running through :func:`expandvars` and :func:`expanduser`. Thus a simple call:: + filefind('myfile.txt') + will find the file in the current working dir, but:: + filefind('~/myfile.txt') + Will find the file in the users home directory. This function does not automatically try any paths, such as the cwd or the user's home directory. + Parameters ---------- filename : str @@ -402,6 +363,7 @@ def filefind(filename, path_dirs=None): put into a sequence and the searched. If a sequence, walk through each element and join with ``filename``, calling :func:`expandvars` and :func:`expanduser` before testing for existence. + Returns ------- Raises :exc:`IOError` or returns absolute path to file. 
@@ -420,16 +382,18 @@ def filefind(filename, path_dirs=None): for path in path_dirs: if path == ".": - path = os.getcwd() + path = os.getcwd() # noqa: PLW2901 testname = expand_path(os.path.join(path, filename)) if os.path.isfile(testname): return os.path.abspath(testname) - raise OSError(f"File {filename!r} does not exist in any of the search paths: {path_dirs!r}") + msg = f"File {filename!r} does not exist in any of the search paths: {path_dirs!r}" + raise OSError(msg) -def expand_path(s): +def expand_path(s: str) -> str: """Expand $VARS and ~names in a string, like a shell + :Examples: In [2]: os.environ['FOO']='test' In [3]: expand_path('variable FOO is $FOO') @@ -448,13 +412,13 @@ def expand_path(s): return s -def import_item(name): +def import_item(name: str) -> Any: """Import and return ``bar`` given the string ``foo.bar``. Calling ``bar = import_item("foo.bar")`` is the functional equivalent of executing the code ``from foo import bar``. Parameters ---------- - name : string + name : str The fully qualified name of the module/package being imported. Returns ------- @@ -469,8 +433,8 @@ def import_item(name): module = __import__(package, fromlist=[obj]) try: pak = getattr(module, obj) - except AttributeError: - raise ImportError("No module named %s" % obj) + except AttributeError as e: + raise ImportError("No module named %s" % obj) from e return pak else: # called with un-dotted string diff --git a/jupyter_server/view/__init__.py b/jupyter_server/view/__init__.py index e69de29bb2..d0ad2546e1 100644 --- a/jupyter_server/view/__init__.py +++ b/jupyter_server/view/__init__.py @@ -0,0 +1 @@ +"""Tornado handlers for viewing HTML files.""" diff --git a/jupyter_server/view/handlers.py b/jupyter_server/view/handlers.py index 40702e1e70..428e576264 100644 --- a/jupyter_server/view/handlers.py +++ b/jupyter_server/view/handlers.py @@ -1,12 +1,13 @@ """Tornado handlers for viewing HTML files.""" # Copyright (c) Jupyter Development Team. 
# Distributed under the terms of the Modified BSD License. +from jupyter_core.utils import ensure_async from tornado import web -from jupyter_server.auth import authorized +from jupyter_server.auth.decorator import authorized from ..base.handlers import JupyterHandler, path_regex -from ..utils import ensure_async, url_escape, url_path_join +from ..utils import url_escape, url_path_join AUTH_RESOURCE = "contents" @@ -19,6 +20,7 @@ class ViewHandler(JupyterHandler): @web.authenticated @authorized async def get(self, path): + """Get a view on a given path.""" path = path.strip("/") if not await ensure_async(self.contents_manager.file_exists(path)): raise web.HTTPError(404, "File does not exist: %s" % path) diff --git a/package-lock.json b/package-lock.json index 3e30b1eb86..546528db85 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,8 +1,405 @@ { "name": "jupyter_server", "version": "1.0.0", - "lockfileVersion": 1, + "lockfileVersion": 2, "requires": true, + "packages": { + "": { + "name": "jupyter_server", + "version": "1.0.0", + "license": "BSD", + "dependencies": { + "bootstrap": "^3.4.0", + "copyfiles": "^2.4.1" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/bootstrap": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-3.4.1.tgz", + "integrity": "sha512-yN5oZVmRCwe5aKwzRj6736nSmKDX7pLYwsXiCj/EYmo16hODaBiT4En5btW/jhBF/seV+XMx3aYwukYC3A49DA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "node_modules/copyfiles": { + "version": "2.4.1", + 
"resolved": "https://registry.npmjs.org/copyfiles/-/copyfiles-2.4.1.tgz", + "integrity": "sha512-fereAvAvxDrQDOXybk3Qu3dPbOoKoysFMWtkY3mv5BsL8//OSZVL5DCLYqgRfY5cWirgRzlC+WSrxp6Bo3eNZg==", + "dependencies": { + "glob": "^7.0.5", + "minimatch": "^3.0.3", + "mkdirp": "^1.0.4", + "noms": "0.0.0", + "through2": "^2.0.1", + "untildify": "^4.0.0", + "yargs": "^16.1.0" + }, + "bin": { + "copyfiles": "copyfiles", + "copyup": "copyfiles" + } + }, + "node_modules/core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + 
"minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/noms": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/noms/-/noms-0.0.0.tgz", + "integrity": "sha1-2o69nzr51nYJGbJ9nNyAkqczKFk=", 
+ "dependencies": { + "inherits": "^2.0.1", + "readable-stream": "~1.0.31" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "node_modules/readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, + "node_modules/string-width": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + 
"integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dependencies": { + "ansi-regex": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "node_modules/through2/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/through2/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/through2/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/untildify": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": 
"^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "engines": { + "node": ">=10" + } + } + }, "dependencies": { "ansi-regex": { "version": "5.0.1", @@ -141,9 +538,9 @@ "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" }, "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "requires": { "brace-expansion": "^1.1.7" } @@ -201,6 +598,11 @@ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, "string-width": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", @@ -211,11 +613,6 @@ "strip-ansi": "^6.0.0" } }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" - }, "strip-ansi": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", diff --git a/package.json b/package.json index d4c9c73c2a..2e5fc7657d 100644 --- 
a/package.json +++ b/package.json @@ -9,5 +9,30 @@ "dependencies": { "bootstrap": "^3.4.0", "copyfiles": "^2.4.1" + }, + "eslintConfig": { + "parserOptions": { + "ecmaVersion": 6, + "sourceType": "module" + }, + "rules": { + "semi": 1, + "no-cond-assign": 2, + "no-debugger": 2, + "comma-dangle": 0, + "no-unreachable": 2 + } + }, + "eslintIgnore": [ + "*.min.js", + "*components*", + "*node_modules*", + "*built*", + "*build*" + ], + "babel": { + "presets": [ + "es2015" + ] } } diff --git a/pyproject.toml b/pyproject.toml index 6c549dda5b..68b10b04d9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,55 +1,297 @@ [build-system] -requires = ["jupyter_packaging~=0.9", "pre-commit"] -build-backend = "jupyter_packaging.build_api" +requires = ["hatchling >=1.11"] +build-backend = "hatchling.build" -[tool.jupyter-packaging.builder] -factory = "jupyter_packaging.npm_builder" +[project] +name = "jupyter_server" +dynamic = ["version"] +readme = "README.md" +license = { file = "LICENSE" } +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
+authors = [{name = "Jupyter Development Team", email = "jupyter@googlegroups.com"}] +keywords = ["ipython", "jupyter"] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Framework :: Jupyter", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "Intended Audience :: System Administrators", + "License :: OSI Approved :: BSD License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", +] +requires-python = ">=3.8" +dependencies = [ + "anyio>=3.1.0", + "argon2-cffi", + "jinja2", + "jupyter_client>=7.4.4", + "jupyter_core>=4.12,!=5.0.*", + "jupyter_server_terminals", + "nbconvert>=6.4.4", + "nbformat>=5.3.0", + "packaging", + "prometheus_client", + "pywinpty;os_name=='nt'", + "pyzmq>=24", + "Send2Trash>=1.8.2", + "terminado>=0.8.3", + "tornado>=6.2.0", + "traitlets>=5.6.0", + "websocket-client", + "jupyter_events>=0.9.0", + "overrides" +] + +[project.urls] +Homepage = "https://jupyter-server.readthedocs.io" +Documentation = "https://jupyter-server.readthedocs.io" +Funding = "https://numfocus.org/donate" +Source = "https://github.com/jupyter-server/jupyter_server" +Tracker = "https://github.com/jupyter-server/jupyter_server/issues" + +[project.optional-dependencies] +test = [ + "ipykernel", + "pytest-console-scripts", + "pytest-timeout", + "pytest-jupyter[server]>=0.4", + "pytest>=7.0", + "requests", + "pre-commit", + 'flaky' +] +docs = [ + "ipykernel", + "jinja2", + "jupyter_client", + "jupyter_server", + "myst-parser", + "nbformat", + "prometheus_client", + "pydata_sphinx_theme", + "Send2Trash", + "sphinxcontrib-openapi>=0.8.0", + "sphinxcontrib_github_alt", + "sphinxcontrib-spelling", + "sphinx-autodoc-typehints", + "sphinxemoji", + "tornado", + # workaround for an unknown downstream library that is now + # missing typing_extensions + "typing_extensions" +] + +[project.scripts] +jupyter-server = 
"jupyter_server.serverapp:main" + +[tool.hatch.envs.docs] +features = ["docs"] +[tool.hatch.envs.docs.scripts] +build = "make -C docs html SPHINXOPTS='-W'" +api = "sphinx-apidoc -o docs/source/api -f -E jupyter_server */terminal jupyter_server/pytest_plugin.py" + +[tool.hatch.envs.test] +features = ["test"] +[tool.hatch.envs.test.scripts] +test = "python -m pytest -vv {args}" +nowarn = "test -W default {args}" + +[tool.hatch.envs.typing] +dependencies = [ "pre-commit"] +detached = true +[tool.hatch.envs.typing.scripts] +test = "pre-commit run --all-files --hook-stage manual mypy" + +[tool.hatch.envs.cov] +features = ["test"] +dependencies = ["coverage[toml]", "pytest-cov"] +[tool.hatch.envs.cov.scripts] +test = "python -m pytest -vv --cov jupyter_server --cov-branch --cov-report term-missing:skip-covered {args}" +nowarn = "test -W default {args}" +integration = "test --integration_tests=true {args}" + +[tool.hatch.envs.lint] +detached = true +dependencies = ["pre-commit"] +[tool.hatch.envs.lint.scripts] +build = [ + "pre-commit run --all-files ruff", + "pre-commit run --all-files ruff-format", +] + +[tool.hatch.version] +path = "jupyter_server/_version.py" +validate-bump = false -[tool.check-manifest] -ignore = ["tbump.toml", ".*", "*.yml", "package-lock.json", "bootstrap*", "conftest.py"] +[tool.hatch.build] +artifacts = ["jupyter_server/static/style"] + +[tool.hatch.build.hooks.jupyter-builder] +dependencies = ["hatch-jupyter-builder>=0.8.1"] +build-function = "hatch_jupyter_builder.npm_builder" +ensured-targets = [ + "jupyter_server/static/style/bootstrap.min.css", + "jupyter_server/static/style/bootstrap-theme.min.css" +] +skip-if-exists = ["jupyter_server/static/style/bootstrap.min.css"] +install-pre-commit-hook = true +optional-editable-build = true + +[tool.ruff] +line-length = 100 + +[tool.ruff.lint] +select = [ + "B", # flake8-bugbear + "I", # isort + "C4", # flake8-comprehensions + "EM", # flake8-errmsg + "ICN", # flake8-import-conventions + "PGH", # 
pygrep-hooks + "PIE", # flake8-pie + "PL", # pylint + "PT", # flake8-pytest-style + "RET", # flake8-return + "RUF", # Ruff-specific + "SIM", # flake8-simplify + "T20", # flake8-print + "UP", # pyupgrade + "YTT", # flake8-2020 + "EXE", # flake8-executable + "NPY", # NumPy specific rules + "PD", # pandas-vet + "PYI", # flake8-pyi +] +ignore = [ + # Allow non-abstract empty methods in abstract base classes + "B027", + # Use of `assert` detected + "S101", + # Use `contextlib.suppress(SchemaRegistryException)` instead of `try`-`except`-`pass` + "SIM105", + # Missing explicit `return` at the end of function able to return non-`None` value + "RET503" , + # Unnecessary assignment to + "RET504", + # Unnecessary `else` after `return` statement + "RET505", + # Unnecessary `elif` after `raise` statement + "RET506", + # Possible hardcoded password + "S105", + "S106", + "PLR", # Design related pylint codes + # RUF012 Mutable class attributes should be annotated with `typing.ClassVar` + "RUF012", + # Use `X | Y` for type annotations + "UP007", +] +unfixable = [ + # Don't touch print statements + "T201", + # Don't touch unused imports + "F401", + # Don't touch noqa lines + "RUF100", +] + +[tool.ruff.lint.per-file-ignores] +# B011 Do not call assert False since python -O removes these calls +# F841 local variable 'foo' is assigned to but never used +# C408 Unnecessary `dict` call +# S108 Probable insecure usage of temporary file or directory +# PLC1901 `ext_pkg.version == ""` can be simplified to `not ext_pkg.version` as an empty string is falsey +# B018 Found useless expression +"tests/*" = ["B011", "F841", "EM", "C", "T201", "S108", "PLC1901", "PTH", "ARG", "PT", "RET", "G", "PLW", "B018"] +# print should be used in applications +"**/*app.py" = ["T201"] +# Ignore flake 8 errors from shimmed imports +"jupyter_server/base/zmqhandlers.py" = ["F401"] +# PLR2004 Magic value used in comparison +"test_handlers.py" = ["PLR2004"] +# F821 Undefined name `c` +"**/*_config.py" = ["F821"] +# 
F401 `jupyter_server_terminals.TerminalAPIHandler` imported but unused +"jupyter_server/terminal/api_handlers.py" = ["F401"] +# S607 Starting a process with a partial executable path +# S603 `subprocess` call: check for execution of untrusted input +"jupyter_server/services/contents/filemanager.py" = ["S603", "S607"] +"tests/unix_sockets/test_serverapp_integration.py" = ["S603", "S607"] [tool.pytest.ini_options] -addopts = "-raXs --durations 10 --color=yes --doctest-modules" +minversion = "6.0" +xfail_strict = true +log_cli_level = "info" +addopts = [ + "-ra", "--durations=10", "--color=yes", "--doctest-modules", + "--showlocals", "--strict-markers", "--strict-config" +] testpaths = [ "tests/" ] -timeout = 300 -# Restore this setting to debug failures -# timeout_method = "thread" +timeout = 100 +# Restore this setting to debug failures. +timeout_method = "thread" filterwarnings = [ "error", - "ignore:There is no current event loop:DeprecationWarning", "ignore:Passing a schema to Validator.iter_errors:DeprecationWarning", - "ignore:unclosed \d+)\.(?P\d+)\.(?P\d+) - ((?Pa|b|rc|.dev)(?P\d+))? 
-''' - -[tool.tbump.git] -message_template = "Bump to {new_version}" -tag_template = "v{new_version}" +[tool.mypy] +python_version = "3.8" +explicit_package_bases = true +strict = true +pretty = true +warn_unreachable = true +disable_error_code = ["no-untyped-def", "no-untyped-call"] +enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] -[[tool.tbump.file]] -src = "jupyter_server/_version.py" -version_template = '({major}, {minor}, {patch}, "{channel}", "{release}")' +[tool.interrogate] +ignore-init-module=true +ignore-private=true +ignore-semiprivate=true +ignore-property-decorators=true +ignore-nested-functions=true +ignore-nested-classes=true +fail-under=95 +exclude = ["docs", "test"] -[[tool.tbump.field]] -name = "channel" -default = "" +[tool.check-wheel-contents] +ignore = ["W002"] -[[tool.tbump.field]] -name = "release" -default = "" +[tool.repo-review] +ignore = ["GH102"] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 886a9a1cf5..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,99 +0,0 @@ -[metadata] -name = jupyter_server -version = attr: jupyter_server.__version__ -description = The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications. 
-long_description = file: README.md -long_description_content_type = text/markdown -license_files = COPYING.md -author = Jupyter Development Team -author_email = jupyter@googlegroups.com -url = https://jupyter-server.readthedocs.io -platforms = Linux, Mac OS X, Windows -keywords = ipython, jupyter -classifiers = - Development Status :: 5 - Production/Stable - Framework :: Jupyter - Intended Audience :: Developers - Intended Audience :: Science/Research - Intended Audience :: System Administrators - License :: OSI Approved :: BSD License - Programming Language :: Python - Programming Language :: Python :: 3 - Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.7 - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 -project_urls = - Documentation = https://jupyter-server.readthedocs.io - Funding = https://numfocus.org/donate - Source = https://github.com/jupyter-server/jupyter_server - Tracker = https://github.com/jupyter-server/jupyter_server/issues - -[options] -zip_safe = False -include_package_data = True -packages = find: -package_dir = - "" = "jupyter_server" -python_requires = >=3.7 -install_requires = - anyio>=3.1.0,<4 - argon2-cffi - jinja2 - jupyter_client>=6.1.12 - jupyter_core>=4.7.0 - nbconvert>=6.4.4 - nbformat>=5.2.0 - packaging - prometheus_client - pywinpty;os_name=='nt' - pyzmq>=17 - Send2Trash - terminado>=0.8.3 - tornado>=6.1.0 - traitlets>=5.1 - websocket-client - -[options.extras_require] -test = - coverage - ipykernel - pre-commit - pytest-console-scripts - pytest-cov - pytest-mock - pytest-timeout - pytest-tornasync - pytest>=6.0 - requests - requests - -[options.entry_points] -console_scripts = - jupyter-server = jupyter_server.serverapp:main - -[options.packages.find] -exclude = - docs.* - examples.* - tests - tests.* - -[flake8] -ignore = E501, W503, E402 -builtins = c, get_config -exclude = - .cache, - .github, - 
docs, - setup.py -enable-extensions = G -extend-ignore = - G001, G002, G004, G200, G201, G202, - # black adds spaces around ':' - E203, -per-file-ignores = - # B011: Do not call assert False since python -O removes these calls - # F841 local variable 'foo' is assigned to but never used - tests/*: B011, F841 diff --git a/setup.py b/setup.py deleted file mode 100644 index 499eb46d94..0000000000 --- a/setup.py +++ /dev/null @@ -1,25 +0,0 @@ -import subprocess -import sys - -from setuptools import setup - -try: - from jupyter_packaging import npm_builder, wrap_installers - - ensured_targets = ["jupyter_server/static/style/bootstrap.min.css"] - - def post_develop(*args, **kwargs): - npm_builder() - try: - subprocess.run([sys.executable, "-m", "pre_commit", "install"]) - subprocess.run( - [sys.executable, "-m", "pre_commit", "install", "--hook-type", "pre-push"] - ) - except Exception: - pass - - cmdclass = wrap_installers(post_develop=post_develop, ensured_targets=ensured_targets) -except ImportError: - cmdclass = {} - -setup(cmdclass=cmdclass) diff --git a/tests/auth/test_authorizer.py b/tests/auth/test_authorizer.py index 096437b47a..08c49eadf0 100644 --- a/tests/auth/test_authorizer.py +++ b/tests/auth/test_authorizer.py @@ -1,90 +1,34 @@ """Tests for authorization""" import json +import os import pytest from jupyter_client.kernelspec import NATIVE_KERNEL_NAME from nbformat import writes from nbformat.v4 import new_notebook -from tornado.httpclient import HTTPClientError -from tornado.websocket import WebSocketHandler -from jupyter_server.auth.authorizer import Authorizer -from jupyter_server.auth.utils import HTTP_METHOD_TO_AUTH_ACTION, match_url_to_resource from jupyter_server.services.security import csp_report_uri -class AuthorizerforTesting(Authorizer): +@pytest.fixture() +def jp_server_config(jp_server_authorizer): + return { + "ServerApp": {"authorizer_class": jp_server_authorizer}, + "jpserver_extensions": {"jupyter_server_terminals": True}, + } - # Set these 
class attributes from within a test - # to verify that they match the arguments passed - # by the REST API. - permissions = {} - def normalize_url(self, path): - """Drop the base URL and make sure path leads with a /""" - base_url = self.parent.base_url - # Remove base_url - if path.startswith(base_url): - path = path[len(base_url) :] - # Make sure path starts with / - if not path.startswith("/"): - path = "/" + path - return path - - def is_authorized(self, handler, user, action, resource): - # Parse Request - if isinstance(handler, WebSocketHandler): - method = "WEBSOCKET" - else: - method = handler.request.method - url = self.normalize_url(handler.request.path) - - # Map request parts to expected action and resource. - expected_action = HTTP_METHOD_TO_AUTH_ACTION[method] - expected_resource = match_url_to_resource(url) - - # Assert that authorization layer returns the - # correct action + resource. - assert action == expected_action - assert resource == expected_resource - - # Now, actually apply the authorization layer. 
- return all( - [ - action in self.permissions.get("actions", []), - resource in self.permissions.get("resources", []), - ] - ) - - -@pytest.fixture -def jp_server_config(): - return {"ServerApp": {"authorizer_class": AuthorizerforTesting}} - - -@pytest.fixture -def send_request(jp_fetch, jp_ws_fetch): - """Send to Jupyter Server and return response code.""" - - async def _(url, **fetch_kwargs): - if url.endswith("channels") or "/websocket/" in url: - fetch = jp_ws_fetch - else: - fetch = jp_fetch - - try: - r = await fetch(url, **fetch_kwargs, allow_nonstandard_methods=True) - code = r.code - except HTTPClientError as err: - code = err.code - else: - if fetch is jp_ws_fetch: - r.close() - - print(code, url, fetch_kwargs) - return code - - return _ +@pytest.fixture() +def jp_server_auth_resources(jp_server_auth_core_resources): + # terminal plugin doesn't have importable url patterns + # get these from terminal/__init__.py + for url_regex in [ + r"/terminals/websocket/(\w+)", + "/api/terminals", + r"/api/terminals/(\w+)", + ]: + jp_server_auth_core_resources[url_regex] = "terminals" + return jp_server_auth_core_resources HTTP_REQUESTS = [ @@ -205,7 +149,6 @@ async def test_authorized_requests( send_request, tmp_path, jp_serverapp, - jp_cleanup_subprocesses, method, url, body, @@ -228,12 +171,12 @@ async def test_authorized_requests( km = jp_serverapp.kernel_manager if "session" in url: - request.addfinalizer(lambda: io_loop.run_sync(km.shutdown_all)) session_model = await jp_serverapp.session_manager.create_session(path="foo") session_id = session_model["id"] if "kernel" in url: - request.addfinalizer(lambda: io_loop.run_sync(km.shutdown_all)) + if os.name == "nt": + pytest.skip("Test hangs on Windows") kernel_id = await km.start_kernel() kernel = km.get_kernel(kernel_id) # kernels take a moment to be ready @@ -245,7 +188,6 @@ async def test_authorized_requests( if "terminal" in url: term_manager = jp_serverapp.web_app.settings["terminal_manager"] - 
request.addfinalizer(lambda: io_loop.run_sync(term_manager.terminate_all)) term_model = term_manager.create() term_name = term_model["name"] @@ -275,5 +217,3 @@ async def test_authorized_requests( code = await send_request(url, body=body, method=method) assert code in expected_codes - - await jp_cleanup_subprocesses() diff --git a/tests/auth/test_identity.py b/tests/auth/test_identity.py new file mode 100644 index 0000000000..6f3af07060 --- /dev/null +++ b/tests/auth/test_identity.py @@ -0,0 +1,207 @@ +import json +import logging +from contextlib import nullcontext +from unittest import mock + +import pytest + +from jupyter_server.auth import IdentityProvider, User +from jupyter_server.auth.identity import PasswordIdentityProvider, _backward_compat_user +from jupyter_server.serverapp import ServerApp + + +class CustomUser: + def __init__(self, name): + self.name = name + + +@pytest.mark.parametrize( + "old_user, expected", + [ + ( + "str-name", + {"username": "str-name", "name": "str-name", "display_name": "str-name"}, + ), + ( + {"username": "user.username", "name": "user.name"}, + { + "username": "user.username", + "name": "user.name", + "display_name": "user.name", + }, + ), + ( + {"username": "user.username", "display_name": "display"}, + { + "username": "user.username", + "name": "user.username", + "display_name": "display", + }, + ), + ({"name": "user.name"}, {"username": "user.name", "name": "user.name"}), + ({"unknown": "value"}, ValueError), + (CustomUser("custom_name"), ValueError), + ], +) +def test_identity_model(old_user, expected): + if isinstance(expected, type) and issubclass(expected, Exception): + with pytest.raises(expected): + user = _backward_compat_user(old_user) + return + user = _backward_compat_user(old_user) + idp = IdentityProvider() + identity = idp.identity_model(user) + print(identity) + identity_subset = {key: identity[key] for key in expected} # type:ignore[union-attr] + print(type(identity), type(identity_subset), type(expected)) + 
assert identity_subset == expected + + +@pytest.mark.parametrize( + "fields, expected", + [ + ({"name": "user"}, TypeError), + ( + {"username": "user.username"}, + { + "username": "user.username", + "name": "user.username", + "initials": None, + "avatar_url": None, + "color": None, + }, + ), + ( + {"username": "user.username", "name": "user.name", "color": "#abcdef"}, + { + "username": "user.username", + "name": "user.name", + "display_name": "user.name", + "color": "#abcdef", + }, + ), + ( + {"username": "user.username", "display_name": "display"}, + { + "username": "user.username", + "name": "user.username", + "display_name": "display", + }, + ), + ], +) +def test_user_defaults(fields, expected): + if isinstance(expected, type) and issubclass(expected, Exception): + with pytest.raises(expected): + user = User(**fields) + return + user = User(**fields) + + # check expected fields + for key in expected: # type:ignore[union-attr] + assert getattr(user, key) == expected[key] # type:ignore[index] + + # check types + for key in ("username", "name", "display_name"): + value = getattr(user, key) + assert isinstance(value, str) + # don't allow empty strings + assert value + + for key in ("initials", "avatar_url", "color"): + value = getattr(user, key) + assert value is None or isinstance(value, str) + + +@pytest.fixture() +def identity_provider_class(): + """Allow override in other test modules""" + return PasswordIdentityProvider + + +@pytest.mark.parametrize( + "ip, token, ssl, warns", + [ + ("", "", None, "highly insecure"), + ("", "", {"key": "x"}, "all IP addresses"), + ("", "secret", None, "and not using encryption"), + ("", "secret", {"key": "x"}, False), + ("127.0.0.1", "secret", None, False), + ], +) +def test_validate_security( + identity_provider_class, + ip, + token, + ssl, + warns, + caplog, +): + app = ServerApp(ip=ip, log=logging.getLogger()) + idp = identity_provider_class(parent=app, token=token) + app.identity_provider = idp + + with 
caplog.at_level(logging.WARNING): + idp.validate_security(app, ssl_options=ssl) + for record in caplog.records: + print(record) + + if warns: + assert len(caplog.records) > 0 + if isinstance(warns, str): + logged = "\n".join(record.msg for record in caplog.records) + assert warns in logged + else: + assert len(caplog.records) == 0 + + +@pytest.mark.parametrize( + "password_set, password_required, ok", + [ + (True, False, True), + (True, True, True), + (False, False, True), + (False, True, False), + ], +) +def test_password_required(identity_provider_class, password_set, password_required, ok): + app = ServerApp() + idp = identity_provider_class( + parent=app, + hashed_password="xxx" if password_set else "", + password_required=password_required, + ) + app.identity_provider = idp + ctx = nullcontext() if ok else pytest.raises(SystemExit) + + with ctx: + idp.validate_security(app, ssl_options=None) + + +async def test_auth_disabled(request, jp_serverapp, jp_fetch): + idp = PasswordIdentityProvider( + parent=jp_serverapp, + hashed_password="", + token="", + ) + assert not idp.auth_enabled + + with mock.patch.dict(jp_serverapp.web_app.settings, {"identity_provider": idp}): + resp = await jp_fetch("/api/me", headers={"Authorization": "", "Cookie": ""}) + + user_info = json.loads(resp.body.decode("utf8")) + # anonymous login sets a cookie + assert "Set-Cookie" in resp.headers + cookie = resp.headers["Set-Cookie"] + + # second request, with cookie keeps the same anonymous user + resp = await jp_fetch("/api/me", headers={"Authorization": "", "Cookie": cookie}) + + user_info_repeat = json.loads(resp.body.decode("utf8")) + assert user_info_repeat["identity"] == user_info["identity"] + + # another request, no cookie, new anonymous user + resp = await jp_fetch("/api/me", headers={"Authorization": "", "Cookie": ""}) + + user_info_2 = json.loads(resp.body.decode("utf8")) + assert user_info_2["identity"]["username"] != user_info["identity"]["username"] diff --git 
a/tests/auth/test_legacy_login.py b/tests/auth/test_legacy_login.py new file mode 100644 index 0000000000..be139fe707 --- /dev/null +++ b/tests/auth/test_legacy_login.py @@ -0,0 +1,108 @@ +""" +Test legacy login config via ServerApp.login_handler_class +""" + +import json + +import pytest +from traitlets.config import Config + +from jupyter_server.auth.identity import LegacyIdentityProvider +from jupyter_server.auth.login import LoginHandler +from jupyter_server.auth.security import passwd +from jupyter_server.serverapp import ServerApp + +# re-run some login tests with legacy login config +from .test_identity import test_password_required, test_validate_security +from .test_login import login, test_change_password, test_login_cookie, test_logout + +# Don't raise on deprecation warnings in this module testing deprecated behavior +pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning") + + +class CustomLoginHandler(LoginHandler): + @classmethod + def get_user(cls, handler): + header_user = handler.request.headers.get("test-user") + if header_user: + if header_user == "super": + return super().get_user(handler) + return header_user + else: + return None + + +@pytest.fixture() +def login_headers(): + return {"test-user": "super"} + + +@pytest.fixture() +def jp_server_config(): + cfg = Config() + cfg.ServerApp.login_handler_class = CustomLoginHandler + return cfg + + +@pytest.fixture() +def identity_provider_class(): + # for tests imported from test_identity.py + return LegacyIdentityProvider + + +def test_legacy_identity_config(jp_serverapp): + # setting login_handler_class sets LegacyIdentityProvider + app = ServerApp() + idp = jp_serverapp.identity_provider + assert type(idp) is LegacyIdentityProvider + assert idp.login_available + assert idp.auth_enabled + assert idp.token + assert idp.get_handlers() == [ + ("/login", idp.login_handler_class), + ("/logout", idp.logout_handler_class), + ] + + +async def test_legacy_identity_api(jp_serverapp, jp_fetch): 
+ response = await jp_fetch("/api/me", headers={"test-user": "pinecone"}) + assert response.code == 200 + model = json.loads(response.body.decode("utf8")) + assert model["identity"]["username"] == "pinecone" + + +async def test_legacy_base_class(jp_serverapp, jp_fetch): + response = await jp_fetch("/api/me", headers={"test-user": "super"}) + assert "Set-Cookie" in response.headers + cookie = response.headers["Set-Cookie"] + assert response.code == 200 + model = json.loads(response.body.decode("utf8")) + user_id = model["identity"]["username"] # a random uuid + assert user_id + + response = await jp_fetch("/api/me", headers={"test-user": "super", "Cookie": cookie}) + model2 = json.loads(response.body.decode("utf8")) + # second request, should trigger cookie auth + assert model2["identity"] == model["identity"] + + +def test_deprecated_config(jp_configurable_serverapp): + cfg = Config() + cfg.ServerApp.token = token = "asdf" + cfg.ServerApp.password = password = passwd("secrets") + app = jp_configurable_serverapp(config=cfg) + assert app.identity_provider.token == token + assert app.token == token + assert app.identity_provider.hashed_password == password + assert app.password == password + + +def test_deprecated_config_priority(jp_configurable_serverapp): + cfg = Config() + cfg.ServerApp.token = "ignored" + cfg.IdentityProvider.token = token = "idp_token" + cfg.ServerApp.password = passwd("ignored") + cfg.PasswordIdentityProvider.hashed_password = password = passwd("used") + app = jp_configurable_serverapp(config=cfg) + assert app.identity_provider.token == token + assert app.identity_provider.hashed_password == password diff --git a/tests/auth/test_login.py b/tests/auth/test_login.py index 0b918d91d5..7aad3129ca 100644 --- a/tests/auth/test_login.py +++ b/tests/auth/test_login.py @@ -1,4 +1,5 @@ """Tests for login redirects""" +import json from functools import partial from urllib.parse import urlencode @@ -10,12 +11,12 @@ # override default config to ensure a 
non-empty base url is used -@pytest.fixture +@pytest.fixture() def jp_base_url(): return "/a%40b/" -@pytest.fixture +@pytest.fixture() def jp_server_config(jp_base_url): return { "ServerApp": { @@ -24,42 +25,63 @@ def jp_server_config(jp_base_url): } -async def _login(jp_serverapp, http_server_client, jp_base_url, next): +async def _login( + jp_serverapp, + http_server_client, + jp_base_url, + login_headers, + next="/", + password=None, + new_password=None, +): # first: request login page with no creds login_url = url_path_join(jp_base_url, "login") first = await http_server_client.fetch(login_url) cookie_header = first.headers["Set-Cookie"] cookies = parse_cookie(cookie_header) + form = {"_xsrf": cookies.get("_xsrf")} + if password is None: + password = jp_serverapp.identity_provider.token + if password: + form["password"] = password + if new_password: + form["new_password"] = new_password # second, submit login form with credentials try: resp = await http_server_client.fetch( url_concat(login_url, {"next": next}), method="POST", - body=urlencode( - { - "password": jp_serverapp.token, - "_xsrf": cookies.get("_xsrf", ""), - } - ), + body=urlencode(form), headers={"Cookie": cookie_header}, follow_redirects=False, ) except HTTPClientError as e: if e.code != 302: raise - return e.response.headers["Location"] + assert e.response is not None + resp = e.response else: assert resp.code == 302, "Should have returned a redirect!" 
+ return resp -@pytest.fixture -def login(jp_serverapp, http_server_client, jp_base_url): +@pytest.fixture() +def login_headers(): + """Extra headers to pass to login + + Fixture so it can be overridden + """ + return {} + + +@pytest.fixture() +def login(jp_serverapp, http_server_client, jp_base_url, login_headers): """Fixture to return a function to login to a Jupyter server by submitting the login page form """ - yield partial(_login, jp_serverapp, http_server_client, jp_base_url) + return partial(_login, jp_serverapp, http_server_client, jp_base_url, login_headers) @pytest.mark.parametrize( @@ -70,11 +92,13 @@ def login(jp_serverapp, http_server_client, jp_base_url): "//host{base_url}tree", "https://google.com", "/absolute/not/base_url", + "https:///a%40b/extra/slash", ), ) async def test_next_bad(login, jp_base_url, bad_next): bad_next = bad_next.format(base_url=jp_base_url) - url = await login(bad_next) + resp = await login(bad_next) + url = resp.headers["Location"] assert url == jp_base_url @@ -90,5 +114,76 @@ async def test_next_bad(login, jp_base_url, bad_next): async def test_next_ok(login, jp_base_url, next_path): next_path = next_path.format(base_url=jp_base_url) expected = jp_base_url + next_path - actual = await login(next=expected) + resp = await login(next=expected) + actual = resp.headers["Location"] assert actual == expected + + +async def test_login_cookie(login, jp_serverapp, jp_fetch, login_headers): + resp = await login() + assert "Set-Cookie" in resp.headers + cookie = resp.headers["Set-Cookie"] + headers = {"Cookie": cookie} + headers.update(login_headers) + id_resp = await jp_fetch("/api/me", headers=headers) + assert id_resp.code == 200 + model = json.loads(id_resp.body.decode("utf8")) + assert model["identity"]["username"] + with pytest.raises(HTTPClientError) as exc: + resp = await login(password="incorrect") + assert exc.value.code == 401 + + +@pytest.mark.parametrize("allow_password_change", [True, False]) +async def 
test_change_password(login, jp_serverapp, jp_base_url, jp_fetch, allow_password_change): + new_password = "super-new-pass" + jp_serverapp.identity_provider.allow_password_change = allow_password_change + resp = await login(new_password=new_password) + + # second request + if allow_password_change: + resp = await login(password=new_password) + assert resp.code == 302 + else: + with pytest.raises(HTTPClientError) as exc_info: + resp = await login(password=new_password) + assert exc_info.value.code == 401 + + +async def test_logout(jp_serverapp, login, http_server_client, jp_base_url): + jp_serverapp.identity_provider.cookie_name = "test-cookie" + expected = jp_base_url + resp = await login(next=jp_base_url) + cookie_header = resp.headers["Set-Cookie"] + cookies = parse_cookie(cookie_header) + assert cookies.get("test-cookie") + + resp = await http_server_client.fetch(jp_base_url + "logout", headers={"Cookie": cookie_header}) + assert resp.code == 200 + cookie_header = resp.headers["Set-Cookie"] + cookies = parse_cookie(cookie_header) + assert not cookies.get("test-cookie") + assert "Successfully logged out" in resp.body.decode("utf8") + + +async def test_token_cookie_user_id(jp_serverapp, jp_fetch): + token = jp_serverapp.identity_provider.token + + # first request with token, sets cookie with user-id + resp = await jp_fetch("/") + assert resp.code == 200 + set_cookie = resp.headers["set-cookie"] + headers = {"Cookie": set_cookie} + + # subsequent requests with cookie and no token + # receive same user-id + resp = await jp_fetch("/api/me", headers=headers) + user_id = json.loads(resp.body.decode("utf8")) + resp = await jp_fetch("/api/me", headers=headers) + user_id2 = json.loads(resp.body.decode("utf8")) + assert user_id["identity"] == user_id2["identity"] + + # new request, just token -> new user_id + resp = await jp_fetch("/api/me") + user_id3 = json.loads(resp.body.decode("utf8")) + assert user_id["identity"] != user_id3["identity"] diff --git 
a/tests/base/test_call_context.py b/tests/base/test_call_context.py new file mode 100644 index 0000000000..1c12338d61 --- /dev/null +++ b/tests/base/test_call_context.py @@ -0,0 +1,109 @@ +import asyncio + +from jupyter_server import CallContext +from jupyter_server.auth.utils import get_anonymous_username +from jupyter_server.base.handlers import JupyterHandler +from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager + + +async def test_jupyter_handler_contextvar(jp_fetch, monkeypatch): + # Create some mock kernel Ids + kernel1 = "x-x-x-x-x" + kernel2 = "y-y-y-y-y" + + # We'll use this dictionary to track the current user within each request. + context_tracker = { + kernel1: {"started": "no user yet", "ended": "still no user", "user": None}, + kernel2: {"started": "no user yet", "ended": "still no user", "user": None}, + } + + # Monkeypatch the get_current_user method in Tornado's + # request handler to return a random user name for + # each request + async def get_current_user(self): + return get_anonymous_username() + + monkeypatch.setattr(JupyterHandler, "get_current_user", get_current_user) + + # Monkeypatch the kernel_model method to show that + # the current context variable is truly local and + # not contaminated by other asynchronous parallel requests. + # Note that even though the current implementation of `kernel_model()` + # is synchronous, we can convert this into an async method because the + # kernel handler wraps the call to `kernel_model()` in `ensure_async()`. + async def kernel_model(self, kernel_id): + # Get the Jupyter Handler from the current context. + current: JupyterHandler = CallContext.get(CallContext.JUPYTER_HANDLER) + # Get the current user + context_tracker[kernel_id]["user"] = current.current_user + context_tracker[kernel_id]["started"] = current.current_user + await asyncio.sleep(1.0) + # Track the current user a few seconds later. 
We'll + # verify that this user was unaffected by other parallel + # requests. + context_tracker[kernel_id]["ended"] = current.current_user + return {"id": kernel_id, "name": "blah"} + + monkeypatch.setattr(AsyncMappingKernelManager, "kernel_model", kernel_model) + + # Make two requests in parallel. + await asyncio.gather( + jp_fetch("api", "kernels", kernel1), + jp_fetch("api", "kernels", kernel2), + ) + + # Assert that the two requests had different users + assert context_tracker[kernel1]["user"] != context_tracker[kernel2]["user"] + # Assert that the first request started+ended with the same user + assert context_tracker[kernel1]["started"] == context_tracker[kernel1]["ended"] + # Assert that the second request started+ended with the same user + assert context_tracker[kernel2]["started"] == context_tracker[kernel2]["ended"] + + +async def test_context_variable_names(): + CallContext.set("foo", "bar") + CallContext.set("foo2", "bar2") + names = CallContext.context_variable_names() + assert len(names) == 2 + assert set(names) == {"foo", "foo2"} + + +async def test_same_context_operations(): + CallContext.set("foo", "bar") + CallContext.set("foo2", "bar2") + + foo = CallContext.get("foo") + assert foo == "bar" + + CallContext.set("foo", "bar2") + assert CallContext.get("foo") == CallContext.get("foo2") + + +async def test_multi_context_operations(): + async def context1(): + """The "slower" context. This ensures that, following the sleep, the + context variable set prior to the sleep is still the expected value. + If contexts are not managed properly, we should find that context2() has + corrupted context1(). + """ + CallContext.set("foo", "bar1") + await asyncio.sleep(1.0) + assert CallContext.get("foo") == "bar1" + context1_names = CallContext.context_variable_names() + assert len(context1_names) == 1 + + async def context2(): + """The "faster" context. This ensures that CallContext reflects the + appropriate values of THIS context. 
+ """ + CallContext.set("foo", "bar2") + assert CallContext.get("foo") == "bar2" + CallContext.set("foo2", "bar2") + context2_names = CallContext.context_variable_names() + assert len(context2_names) == 2 + + await asyncio.gather(context1(), context2()) + + # Assert that THIS context doesn't have any variables defined. + names = CallContext.context_variable_names() + assert len(names) == 0 diff --git a/tests/base/test_handlers.py b/tests/base/test_handlers.py new file mode 100644 index 0000000000..370100fe9d --- /dev/null +++ b/tests/base/test_handlers.py @@ -0,0 +1,153 @@ +"""Test Base Handlers""" +import os +import warnings +from unittest.mock import MagicMock + +from tornado.httpserver import HTTPRequest +from tornado.httputil import HTTPHeaders + +from jupyter_server.auth import AllowAllAuthorizer, IdentityProvider +from jupyter_server.base.handlers import ( + APIHandler, + APIVersionHandler, + AuthenticatedFileHandler, + AuthenticatedHandler, + FileFindHandler, + FilesRedirectHandler, + JupyterHandler, + RedirectWithParams, +) +from jupyter_server.serverapp import ServerApp + + +def test_authenticated_handler(jp_serverapp): + app: ServerApp = jp_serverapp + request = HTTPRequest("OPTIONS") + request.connection = MagicMock() + handler = AuthenticatedHandler(app.web_app, request) + for key in list(handler.settings): + del handler.settings[key] + handler.settings["headers"] = {"Content-Security-Policy": "foo"} + + assert handler.content_security_policy == "foo" + assert handler.skip_check_origin() + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + assert handler.login_handler == handler.identity_provider.login_handler_class + assert isinstance(handler.authorizer, AllowAllAuthorizer) + assert isinstance(handler.identity_provider, IdentityProvider) + + +def test_jupyter_handler(jp_serverapp): + app: ServerApp = jp_serverapp + headers = HTTPHeaders({"Origin": "foo"}) + request = HTTPRequest("OPTIONS", headers=headers) + request.connection = 
MagicMock() + handler = JupyterHandler(app.web_app, request) + for key in list(handler.settings): + del handler.settings[key] + handler.settings["mathjax_url"] = "foo" + handler.settings["mathjax_config"] = "bar" + assert handler.mathjax_url == "/foo" + assert handler.mathjax_config == "bar" + handler.settings["terminal_manager"] = None + assert handler.terminal_manager is None + handler.settings["allow_origin"] = True # type:ignore[unreachable] + handler.set_cors_headers() + handler.settings["allow_origin"] = False + handler.settings["allow_origin_pat"] = "foo" + handler.settings["allow_credentials"] = True + handler.set_cors_headers() + assert handler.check_referer() is True + + +def test_api_handler(jp_serverapp): + app: ServerApp = jp_serverapp + headers = HTTPHeaders({"Origin": "foo"}) + request = HTTPRequest("OPTIONS", headers=headers) + request.connection = MagicMock() + handler = APIHandler(app.web_app, request) + for key in list(handler.settings): + del handler.settings[key] + handler.options() + + +async def test_authenticated_file_handler(jp_serverapp, tmpdir): + app: ServerApp = jp_serverapp + headers = HTTPHeaders({"Origin": "foo"}) + request = HTTPRequest("HEAD", headers=headers) + request.connection = MagicMock() + test_file = tmpdir / "foo" + with open(test_file, "w") as fid: + fid.write("hello") + + handler = AuthenticatedFileHandler(app.web_app, request, path=str(tmpdir)) + for key in list(handler.settings): + if key != "contents_manager": + del handler.settings[key] + handler.check_xsrf_cookie = MagicMock() # type:ignore[method-assign] + handler._jupyter_current_user = "foo" # type:ignore[assignment] + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + head = handler.head("foo") + if head: + await head + assert handler.get_status() == 200 + + +async def test_api_version_handler(jp_serverapp): + app: ServerApp = jp_serverapp + request = HTTPRequest("GET") + request.connection = MagicMock() + handler = 
APIVersionHandler(app.web_app, request) + handler._transforms = [] + handler.get() + assert handler.get_status() == 200 + + +async def test_files_redirect_handler(jp_serverapp): + app: ServerApp = jp_serverapp + request = HTTPRequest("GET") + request.connection = MagicMock() + test_file = os.path.join(app.contents_manager.root_dir, "foo") + with open(test_file, "w") as fid: + fid.write("hello") + handler = FilesRedirectHandler(app.web_app, request) + handler._transforms = [] + await handler.get("foo") + assert handler.get_status() == 302 + + +def test_redirect_with_params(jp_serverapp): + app: ServerApp = jp_serverapp + request = HTTPRequest("GET") + request.connection = MagicMock() + request.query = "foo" + handler = RedirectWithParams(app.web_app, request, url="foo") + handler._transforms = [] + handler.get() + assert handler.get_status() == 301 + + +async def test_static_handler(jp_serverapp, tmpdir): + async def async_magic(): + pass + + MagicMock.__await__ = lambda x: async_magic().__await__() + + test_file = tmpdir / "foo" + with open(test_file, "w") as fid: + fid.write("hello") + + app: ServerApp = jp_serverapp + request = HTTPRequest("GET", str(test_file)) + request.connection = MagicMock() + + handler = FileFindHandler(app.web_app, request, path=str(tmpdir)) + handler._transforms = [] + await handler.get("foo") + assert handler._headers["Cache-Control"] == "no-cache" + + handler.settings["static_immutable_cache"] = [str(tmpdir)] + await handler.get("foo") + assert handler._headers["Cache-Control"] == "public, max-age=31536000, immutable" diff --git a/tests/base/test_websocket.py b/tests/base/test_websocket.py new file mode 100644 index 0000000000..ee6ee3ee62 --- /dev/null +++ b/tests/base/test_websocket.py @@ -0,0 +1,62 @@ +"""Test Base Websocket classes""" +import logging +import time +from unittest.mock import MagicMock + +import pytest +from tornado.httpserver import HTTPRequest +from tornado.httputil import HTTPHeaders +from tornado.websocket import 
WebSocketClosedError, WebSocketHandler + +from jupyter_server.base.websocket import WebSocketMixin +from jupyter_server.serverapp import ServerApp + + +class MockHandler(WebSocketMixin, WebSocketHandler): + allow_origin = "*" + allow_origin_pat = "" + log = logging.getLogger() + + +@pytest.fixture() +def mixin(jp_serverapp): + app: ServerApp = jp_serverapp + headers = HTTPHeaders({"Host": "foo"}) + request = HTTPRequest("GET", headers=headers) + request.connection = MagicMock() + return MockHandler(app.web_app, request) + + +def test_web_socket_mixin(mixin): + assert mixin.check_origin("foo") is True + mixin.allow_origin = "" + assert mixin.check_origin("") is False + mixin.allow_origin_pat = "foo" + assert mixin.check_origin("foo") is True + mixin.clear_cookie() + assert mixin.get_status() == 200 + + +def test_web_socket_mixin_ping(mixin): + mixin.ws_connection = MagicMock() + mixin.ws_connection.is_closing = lambda: False + mixin.send_ping() + + +def test_ping_client_terminated(mixin): + mixin.ws_connection = MagicMock() + mixin.ws_connection.client_terminated = True + mixin.send_ping() + with pytest.raises(WebSocketClosedError): + mixin.write_message("hello") + + +async def test_ping_client_timeout(mixin): + mixin.on_pong("foo") + mixin.settings["ws_ping_timeout"] = 0.1 + time.sleep(0.3) + mixin.ws_connection = MagicMock() + mixin.ws_connection.is_closing = lambda: False + mixin.send_ping() + with pytest.raises(WebSocketClosedError): + mixin.write_message("hello") diff --git a/tests/conftest.py b/tests/conftest.py index 4fc60c4049..f50aa797db 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,8 @@ import os import pytest +from nbformat import writes +from nbformat.v4 import new_notebook from tests.extension.mockextensions.app import MockExtensionApp @@ -31,11 +33,10 @@ def pytest_runtest_setup(item): if item.config.getoption("--integration_tests") is True: if not is_integration_test: pytest.skip("Only running tests marked as 'integration_test'.") 
- else: - if is_integration_test: - pytest.skip( - "Skipping this test because it's marked 'integration_test'. Run integration tests using the `--integration_tests` flag." - ) + elif is_integration_test: + pytest.skip( + "Skipping this test because it's marked 'integration_test'. Run integration tests using the `--integration_tests` flag." + ) mock_html = """ @@ -61,18 +62,18 @@ def pytest_runtest_setup(item): """ -@pytest.fixture +@pytest.fixture() def mock_template(jp_template_dir): index = jp_template_dir.joinpath("index.html") index.write_text(mock_html) -@pytest.fixture +@pytest.fixture() def extension_manager(jp_serverapp): return jp_serverapp.extension_manager -@pytest.fixture +@pytest.fixture() def config_file(jp_config_dir): """""" f = jp_config_dir.joinpath("jupyter_mockextension_config.py") @@ -84,3 +85,58 @@ def config_file(jp_config_dir): def jp_mockextension_cleanup(): yield MockExtensionApp.clear_instance() + + +@pytest.fixture() +def contents_dir(tmp_path, jp_serverapp): + return tmp_path / jp_serverapp.root_dir + + +dirs = [ + ("", "inroot"), + ("Directory with spaces in", "inspace"), + ("unicodé", "innonascii"), + ("foo", "a"), + ("foo", "b"), + ("foo", "name with spaces"), + ("foo", "unicodé"), + ("foo/bar", "baz"), + ("ordering", "A"), + ("ordering", "b"), + ("ordering", "C"), + ("å b", "ç d"), +] + + +@pytest.fixture() +def contents(contents_dir): + # Create files in temporary directory + paths: dict = {"notebooks": [], "textfiles": [], "blobs": [], "contents_dir": contents_dir} + for d, name in dirs: + p = contents_dir / d + p.mkdir(parents=True, exist_ok=True) + + # Create a notebook + nb = writes(new_notebook(), version=4) + nbname = p.joinpath(f"{name}.ipynb") + nbname.write_text(nb, encoding="utf-8") + paths["notebooks"].append(nbname.relative_to(contents_dir)) + + # Create a text file + txt = f"{name} text file" + txtname = p.joinpath(f"{name}.txt") + txtname.write_text(txt, encoding="utf-8") + 
paths["textfiles"].append(txtname.relative_to(contents_dir)) + + # Create a random blob + blob = name.encode("utf-8") + b"\xFF" + blobname = p.joinpath(f"{name}.blob") + blobname.write_bytes(blob) + paths["blobs"].append(blobname.relative_to(contents_dir)) + paths["all"] = list(paths.values()) + return paths + + +@pytest.fixture() +def folders(): + return list({item[0] for item in dirs}) diff --git a/tests/extension/mockextensions/app.py b/tests/extension/mockextensions/app.py index 6b3fc95a9b..26f38464cd 100644 --- a/tests/extension/mockextensions/app.py +++ b/tests/extension/mockextensions/app.py @@ -1,26 +1,39 @@ +from __future__ import annotations + import os +from jupyter_events import EventLogger +from jupyter_events.schema_registry import SchemaRegistryException from traitlets import List, Unicode from jupyter_server.base.handlers import JupyterHandler from jupyter_server.extension.application import ExtensionApp, ExtensionAppJinjaMixin -from jupyter_server.extension.handler import ( - ExtensionHandlerJinjaMixin, - ExtensionHandlerMixin, -) +from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin, ExtensionHandlerMixin STATIC_PATH = os.path.join(os.path.dirname(__file__), "static") -# Function that makes these extensions discoverable -# by the test functions. +EVENT_SCHEMA = """\ +$id: https://events.jupyter.org/mockapp/v1/test +version: 1 +properties: + msg: + type: string +required: +- msg +""" +# Function that makes these extensions discoverable +# by the test functions. 
def _jupyter_server_extension_points(): return [{"module": __name__, "app": MockExtensionApp}] class MockExtensionHandler(ExtensionHandlerMixin, JupyterHandler): def get(self): + self.event_logger.emit( + schema_id="https://events.jupyter.org/mockapp/v1/test", data={"msg": "Hello, world!"} + ) self.finish(self.config.mock_trait) @@ -32,10 +45,9 @@ def get(self): class MockExtensionApp(ExtensionAppJinjaMixin, ExtensionApp): - name = "mockextension" - template_paths = List().tag(config=True) - static_paths = [STATIC_PATH] + template_paths: List[str] = List().tag(config=True) # type:ignore[assignment] + static_paths = [STATIC_PATH] # type:ignore[assignment] mock_trait = Unicode("mock trait", config=True) loaded = False @@ -45,6 +57,15 @@ class MockExtensionApp(ExtensionAppJinjaMixin, ExtensionApp): def get_extension_package(): return "tests.extension.mockextensions" + def initialize_settings(self): + # Only add this event if it hasn't already been added. + # Log the error if it fails, but don't crash the app. + try: + elogger: EventLogger = self.serverapp.event_logger # type:ignore[union-attr, assignment] + elogger.register_event_schema(EVENT_SCHEMA) + except SchemaRegistryException as err: + self.log.error(err) + def initialize_handlers(self): self.handlers.append(("/mock", MockExtensionHandler)) self.handlers.append(("/mock_template", MockExtensionTemplateHandler)) diff --git a/tests/extension/mockextensions/mockext_deprecated.py b/tests/extension/mockextensions/mockext_deprecated.py new file mode 100644 index 0000000000..527ac3a8bb --- /dev/null +++ b/tests/extension/mockextensions/mockext_deprecated.py @@ -0,0 +1,12 @@ +"""A mock extension named `mockext_py` for testing purposes. +""" +# Function that makes these extensions discoverable +# by the test functions. 
+ + +def _jupyter_server_extension_paths(): + return [{"module": "tests.extension.mockextensions.mockext_deprecated"}] + + +def load_jupyter_server_extension(serverapp): + pass diff --git a/tests/extension/test_app.py b/tests/extension/test_app.py index 88a423f252..de52924df3 100644 --- a/tests/extension/test_app.py +++ b/tests/extension/test_app.py @@ -1,13 +1,17 @@ +import json +from io import StringIO +from logging import StreamHandler +from typing import Any + import pytest from traitlets.config import Config from jupyter_server.serverapp import ServerApp -from jupyter_server.utils import run_sync from .mockextensions.app import MockExtensionApp -@pytest.fixture +@pytest.fixture() def jp_server_config(jp_template_dir): config = { "ServerApp": { @@ -21,7 +25,7 @@ def jp_server_config(jp_template_dir): return config -@pytest.fixture +@pytest.fixture() def mock_extension(extension_manager): name = "tests.extension.mockextensions" pkg = extension_manager.extensions[name] @@ -78,7 +82,7 @@ def test_extensionapp_no_parent(): assert app.serverapp is not None -OPEN_BROWSER_COMBINATIONS = ( +OPEN_BROWSER_COMBINATIONS: Any = ( (True, {}), (True, {"ServerApp": {"open_browser": True}}), (False, {"ServerApp": {"open_browser": False}}), @@ -116,12 +120,12 @@ def test_extensionapp_no_parent(): @pytest.mark.parametrize("expected_value, config", OPEN_BROWSER_COMBINATIONS) -def test_browser_open(monkeypatch, jp_environ, config, expected_value): +async def test_browser_open(monkeypatch, jp_environ, config, expected_value): serverapp = MockExtensionApp.initialize_server(config=Config(config)) assert serverapp.open_browser == expected_value -def test_load_parallel_extensions(monkeypatch, jp_environ): +async def test_load_parallel_extensions(monkeypatch, jp_environ): serverapp = MockExtensionApp.initialize_server() exts = serverapp.extension_manager.extensions assert "tests.extension.mockextensions.mock1" in exts @@ -132,7 +136,7 @@ def test_load_parallel_extensions(monkeypatch, 
jp_environ): assert exts["tests.extension.mockextensions"] -def test_stop_extension(jp_serverapp, caplog): +async def test_stop_extension(jp_serverapp, caplog): """Test the stop_extension method. This should be fired by ServerApp.cleanup_extensions. @@ -140,9 +144,11 @@ def test_stop_extension(jp_serverapp, caplog): calls = 0 # load extensions (make sure we only have the one extension loaded + # as well as jp_serverapp.extension_manager.load_all_extensions() extension_name = "tests.extension.mockextensions" - assert list(jp_serverapp.extension_manager.extension_apps) == [extension_name] + apps = set(jp_serverapp.extension_manager.extension_apps) + assert apps == {"jupyter_server_terminals", extension_name} # add a stop_extension method for the extension app async def _stop(*args): @@ -156,12 +162,29 @@ async def _stop(*args): # call cleanup_extensions, check the logging is correct caplog.clear() - run_sync(jp_serverapp.cleanup_extensions()) - assert [msg for *_, msg in caplog.record_tuples] == [ - "Shutting down 1 extension", - f'{extension_name} | extension app "mockextension" stopping', - f'{extension_name} | extension app "mockextension" stopped', - ] - - # check the shutdown method was called once - assert calls == 1 + await jp_serverapp.cleanup_extensions() + assert {msg for *_, msg in caplog.record_tuples} == { + "Shutting down 2 extensions", + "jupyter_server_terminals | extension app 'jupyter_server_terminals' stopping", + f"{extension_name} | extension app 'mockextension' stopping", + "jupyter_server_terminals | extension app 'jupyter_server_terminals' stopped", + f"{extension_name} | extension app 'mockextension' stopped", + } + + # check the shutdown method was called twice + assert calls == 2 + + +async def test_events(jp_serverapp, jp_fetch): + stream = StringIO() + handler = StreamHandler(stream) + jp_serverapp.event_logger.register_handler(handler) + + await jp_fetch("mock") + + handler.flush() + output = json.loads(stream.getvalue()) + # Clear the 
sink. + stream.truncate(0) + stream.seek(0) + assert output["msg"] == "Hello, world!" diff --git a/tests/extension/test_config.py b/tests/extension/test_config.py index 5667c3efae..ac7a75aedc 100644 --- a/tests/extension/test_config.py +++ b/tests/extension/test_config.py @@ -9,7 +9,7 @@ pytestmark = pytest.mark.usefixtures("jp_environ") -@pytest.fixture +@pytest.fixture() def configd(jp_env_config_path): """A pathlib.Path object that acts like a jupyter_server_config.d folder.""" configd = jp_env_config_path.joinpath("jupyter_server_config.d") @@ -28,7 +28,7 @@ def configd(jp_env_config_path): """ -@pytest.fixture +@pytest.fixture() def ext1_config(configd): config = configd.joinpath("ext1_config.json") config.write_text(ext1_json_config) @@ -45,7 +45,7 @@ def ext1_config(configd): """ -@pytest.fixture +@pytest.fixture() def ext2_config(configd): config = configd.joinpath("ext2_config.json") config.write_text(ext2_json_config) diff --git a/tests/extension/test_entrypoint.py b/tests/extension/test_entrypoint.py index 7e0974c1c3..353194426d 100644 --- a/tests/extension/test_entrypoint.py +++ b/tests/extension/test_entrypoint.py @@ -6,9 +6,11 @@ def test_server_extension_list(jp_environ, script_runner): ret = script_runner.run( - "jupyter", - "server", - "extension", - "list", + [ + "jupyter", + "server", + "extension", + "list", + ] ) assert ret.success diff --git a/tests/extension/test_handler.py b/tests/extension/test_handler.py index 870e311b4a..3151cf2b4d 100644 --- a/tests/extension/test_handler.py +++ b/tests/extension/test_handler.py @@ -1,7 +1,7 @@ import pytest -@pytest.fixture +@pytest.fixture() def jp_server_config(jp_template_dir): return { "ServerApp": {"jpserver_extensions": {"tests.extension.mockextensions": True}}, diff --git a/tests/extension/test_launch.py b/tests/extension/test_launch.py index cb07ea1c69..de94607187 100644 --- a/tests/extension/test_launch.py +++ b/tests/extension/test_launch.py @@ -14,17 +14,17 @@ HERE = 
os.path.dirname(os.path.abspath(__file__)) -@pytest.fixture +@pytest.fixture() def port(): return 9999 -@pytest.fixture +@pytest.fixture() def token(): return hexlify(os.urandom(4)).decode("ascii") -@pytest.fixture +@pytest.fixture() def auth_header(token): return {"Authorization": "token %s" % token} @@ -42,7 +42,7 @@ def wait_up(url, interval=0.1, check=None): break -@pytest.fixture +@pytest.fixture() def launch_instance(request, port, token): def _run_in_subprocess(argv=None, add_token=True): argv = argv or [] @@ -61,7 +61,7 @@ def _kill_extension_app(): fid.close() if add_token: - f'--ServerApp.token="{token}"', + argv.append(f'--IdentityProvider.token="{token}"') root = Path(HERE).parent.parent @@ -86,7 +86,7 @@ def _kill_extension_app(): return _run_in_subprocess -@pytest.fixture +@pytest.fixture() def fetch(port, auth_header): def _get(endpoint): url = f"http://127.0.0.1:{port}" + endpoint diff --git a/tests/extension/test_manager.py b/tests/extension/test_manager.py index 2b52fea543..6e48b65df4 100644 --- a/tests/extension/test_manager.py +++ b/tests/extension/test_manager.py @@ -1,5 +1,6 @@ import os -import unittest.mock as mock +import sys +from unittest import mock import pytest from jupyter_core.paths import jupyter_config_path @@ -60,7 +61,7 @@ def test_extension_package_api(): path1 = metadata_list[0] app = path1["app"] - e = ExtensionPackage(name="tests.extension.mockextensions") + e = ExtensionPackage(name="tests.extension.mockextensions", enabled=True) e.extension_points assert hasattr(e, "extension_points") assert len(e.extension_points) == len(metadata_list) @@ -70,7 +71,9 @@ def test_extension_package_api(): def test_extension_package_notfound_error(): with pytest.raises(ExtensionModuleNotFound): - ExtensionPackage(name="nonexistent") + ExtensionPackage(name="nonexistent", enabled=True) + # no raise if not enabled + ExtensionPackage(name="nonexistent", enabled=False) def _normalize_path(path_list): @@ -96,39 +99,71 @@ def 
test_extension_manager_linked_extensions(jp_serverapp): assert name in manager.linked_extensions -def test_extension_manager_fail_add(jp_serverapp): +@pytest.mark.parametrize("has_app", [True, False]) +def test_extension_manager_fail_add(jp_serverapp, has_app): name = "tests.extension.notanextension" - manager = ExtensionManager(serverapp=jp_serverapp) + manager = ExtensionManager(serverapp=jp_serverapp if has_app else None) manager.add_extension(name, enabled=True) # should only warn jp_serverapp.reraise_server_extension_failures = True - with pytest.raises(ExtensionModuleNotFound): - manager.add_extension(name, enabled=True) + if has_app: + with pytest.raises(ExtensionModuleNotFound): + assert manager.add_extension(name, enabled=True) is False + else: + assert manager.add_extension(name, enabled=True) is False -def test_extension_manager_fail_link(jp_serverapp): +@pytest.mark.parametrize("has_app", [True, False]) +def test_extension_manager_fail_link(jp_serverapp, has_app): name = "tests.extension.mockextensions.app" with mock.patch( "tests.extension.mockextensions.app.MockExtensionApp.parse_command_line", side_effect=RuntimeError, ): - manager = ExtensionManager(serverapp=jp_serverapp) + manager = ExtensionManager(serverapp=jp_serverapp if has_app else None) manager.add_extension(name, enabled=True) manager.link_extension(name) # should only warn jp_serverapp.reraise_server_extension_failures = True - with pytest.raises(RuntimeError): + if has_app: + with pytest.raises(RuntimeError): + manager.link_extension(name) + else: manager.link_extension(name) -def test_extension_manager_fail_load(jp_serverapp): +@pytest.mark.parametrize("has_app", [True, False]) +def test_extension_manager_fail_load(jp_serverapp, has_app): name = "tests.extension.mockextensions.app" with mock.patch( "tests.extension.mockextensions.app.MockExtensionApp.initialize_handlers", side_effect=RuntimeError, ): - manager = ExtensionManager(serverapp=jp_serverapp) + manager = 
ExtensionManager(serverapp=jp_serverapp if has_app else None) manager.add_extension(name, enabled=True) manager.link_extension(name) manager.load_extension(name) # should only warn jp_serverapp.reraise_server_extension_failures = True - with pytest.raises(RuntimeError): + if has_app: + with pytest.raises(RuntimeError): + manager.load_extension(name) + else: manager.load_extension(name) + + +@pytest.mark.parametrize("has_app", [True, False]) +def test_disable_no_import(jp_serverapp, has_app): + # de-import modules so we can detect if they are re-imported + disabled_ext = "tests.extension.mockextensions.mock1" + enabled_ext = "tests.extension.mockextensions.mock2" + sys.modules.pop(disabled_ext, None) + sys.modules.pop(enabled_ext, None) + + manager = ExtensionManager(serverapp=jp_serverapp if has_app else None) + manager.add_extension(disabled_ext, enabled=False) + manager.add_extension(enabled_ext, enabled=True) + assert disabled_ext not in sys.modules + assert enabled_ext in sys.modules + + ext_pkg = manager.extensions[disabled_ext] + assert ext_pkg.extension_points == {} + assert ext_pkg.version == "" + assert ext_pkg.metadata == [] diff --git a/tests/extension/test_serverextension.py b/tests/extension/test_serverextension.py index eb8390ed3c..98e2a8dec3 100644 --- a/tests/extension/test_serverextension.py +++ b/tests/extension/test_serverextension.py @@ -1,10 +1,20 @@ from collections import OrderedDict import pytest + +try: + from jupyter_core.paths import prefer_environment_over_user +except ImportError: + prefer_environment_over_user = None # type:ignore[assignment] + from traitlets.tests.utils import check_help_all_output from jupyter_server.config_manager import BaseJSONConfigManager from jupyter_server.extension.serverextension import ( + DisableServerExtensionApp, + ListServerExtensionsApp, + ServerExtensionApp, + ToggleServerExtensionApp, _get_config_dir, toggle_server_extension_python, ) @@ -43,6 +53,7 @@ def test_disable(jp_env_config_path, 
jp_extension_environ): assert not config["mock1"] +@pytest.mark.skipif(prefer_environment_over_user is None, reason="Requires jupyter_core 5.0+") def test_merge_config(jp_env_config_path, jp_configurable_serverapp, jp_extension_environ): # Toggle each extension module with a JSON config file # at the sys-prefix config dir. @@ -58,16 +69,16 @@ def test_merge_config(jp_env_config_path, jp_configurable_serverapp, jp_extensio ) # Write this configuration in two places, sys-prefix and user. - # sys-prefix supercedes users, so the extension should be disabled + # sys-prefix supersedes users, so the extension should be disabled # when these two configs merge. toggle_server_extension_python( "tests.extension.mockextensions.mockext_both", - enabled=True, + enabled=False, sys_prefix=True, ) toggle_server_extension_python( "tests.extension.mockextensions.mockext_both", - enabled=False, + enabled=True, user=True, ) @@ -82,7 +93,8 @@ def test_merge_config(jp_env_config_path, jp_configurable_serverapp, jp_extensio assert extensions["tests.extension.mockextensions.mockext_sys"] assert extensions["tests.extension.mockextensions.mockext_py"] # Merging should causes this extension to be disabled. 
- assert not extensions["tests.extension.mockextensions.mockext_both"] + if prefer_environment_over_user(): + assert not extensions["tests.extension.mockextensions.mockext_both"] @pytest.mark.parametrize( @@ -104,3 +116,21 @@ def test_load_ordered(jp_serverapp, jp_server_config): assert jp_serverapp.mockII is True, "Mock II should have been loaded" assert jp_serverapp.mockI is True, "Mock I should have been loaded" assert jp_serverapp.mock_shared == "II", "Mock II should be loaded after Mock I" + + +def test_server_extension_apps(jp_env_config_path, jp_extension_environ): + app = ToggleServerExtensionApp() + app.extra_args = ["mock1"] + app.start() + + app2 = DisableServerExtensionApp() + app2.extra_args = ["mock1"] + app2.start() + + app3 = ListServerExtensionsApp() + app3.start() + + +def test_server_extension_app(): + app = ServerExtensionApp() + app.launch_instance(["list"]) diff --git a/tests/extension/test_utils.py b/tests/extension/test_utils.py index 49a36b20ee..a8482161ec 100644 --- a/tests/extension/test_utils.py +++ b/tests/extension/test_utils.py @@ -1,6 +1,14 @@ +import logging + import pytest -from jupyter_server.extension.utils import validate_extension +from jupyter_server.extension.utils import ( + ExtensionLoadingError, + get_loader, + get_metadata, + validate_extension, +) +from tests.extension.mockextensions import mockext_deprecated, mockext_sys # Use ServerApps environment because it monkeypatches # jupyter_core.paths and provides a config directory @@ -17,3 +25,20 @@ def test_validate_extension(): assert validate_extension("tests.extension.mockextensions.mockext_user") # enabled at Python assert validate_extension("tests.extension.mockextensions.mockext_py") + + +def test_get_loader(): + assert get_loader(mockext_sys) == mockext_sys._load_jupyter_server_extension + with pytest.deprecated_call(): + assert get_loader(mockext_deprecated) == mockext_deprecated.load_jupyter_server_extension + with pytest.raises(ExtensionLoadingError): + 
get_loader(object()) + + +def test_get_metadata(): + _, ext_points = get_metadata("tests.extension.mockextensions.mockext_sys") + assert len(ext_points) + _, ext_points = get_metadata("tests", logger=logging.getLogger()) + point = ext_points[0] + assert point["module"] == "tests" + assert point["name"] == "tests" diff --git a/tests/nbconvert/test_handlers.py b/tests/nbconvert/test_handlers.py index 809f0ba3ec..1805902e76 100644 --- a/tests/nbconvert/test_handlers.py +++ b/tests/nbconvert/test_handlers.py @@ -3,9 +3,9 @@ from shutil import which import pytest -import tornado from nbformat import writes from nbformat.v4 import new_code_cell, new_markdown_cell, new_notebook, new_output +from tornado.httpclient import HTTPClientError from ..utils import expected_http_error @@ -16,7 +16,7 @@ ).decode("ascii") -@pytest.fixture +@pytest.fixture() def notebook(jp_root_dir): # Build sub directory. subdir = jp_root_dir / "foo" @@ -75,7 +75,7 @@ async def test_from_file(jp_fetch, notebook): async def test_from_file_404(jp_fetch, notebook): - with pytest.raises(tornado.httpclient.HTTPClientError) as e: + with pytest.raises(HTTPClientError) as e: await jp_fetch( "nbconvert", "html", diff --git a/tests/services/api/test_api.py b/tests/services/api/test_api.py index c1620ff052..f013dcfcd8 100644 --- a/tests/services/api/test_api.py +++ b/tests/services/api/test_api.py @@ -1,4 +1,11 @@ import json +from typing import Awaitable, Dict, List +from unittest import mock + +import pytest +from tornado.httpclient import HTTPError + +from jupyter_server.auth import Authorizer, IdentityProvider, User async def test_get_spec(jp_fetch): @@ -21,3 +28,156 @@ async def test_get_status(jp_fetch): assert status["kernels"] == 0 assert status["last_activity"].endswith("Z") assert status["started"].endswith("Z") + + +class MockUser(User): + permissions: Dict[str, List[str]] + + +class MockIdentityProvider(IdentityProvider): + mock_user: MockUser + + async def get_user(self, handler): + # super 
returns a UUID + # return our mock user instead, as long as the request is authorized + _authenticated = super().get_user(handler) + if isinstance(_authenticated, Awaitable): + _authenticated = await _authenticated + authenticated = _authenticated + if isinstance(self.mock_user, dict): + self.mock_user = MockUser(**self.mock_user) + if authenticated: + return self.mock_user + + +class MockAuthorizer(Authorizer): + def is_authorized(self, handler, user, action, resource): + permissions = user.permissions + if permissions == "*": + return True + actions = permissions.get(resource, []) + return action in actions + + +@pytest.fixture() +def identity_provider(jp_serverapp): + idp = MockIdentityProvider(parent=jp_serverapp) + authorizer = MockAuthorizer(parent=jp_serverapp) + with mock.patch.dict( + jp_serverapp.web_app.settings, + {"identity_provider": idp, "authorizer": authorizer}, + ): + yield idp + + +@pytest.mark.parametrize( + "identity, expected", + [ + ( + {"username": "user.username"}, + { + "username": "user.username", + "name": "user.username", + "display_name": "user.username", + }, + ), + ( + {"username": "user", "name": "name", "display_name": "display"}, + {"username": "user", "name": "name", "display_name": "display"}, + ), + ( + None, + 403, + ), + ], +) +async def test_identity(jp_fetch, identity, expected, identity_provider): + if identity: + identity_provider.mock_user = MockUser(**identity) + else: + identity_provider.mock_user = None + + if isinstance(expected, int): + with pytest.raises(HTTPError) as exc: + await jp_fetch("api/me") + print(exc) + assert exc.value.code == expected + return + + r = await jp_fetch("api/me") + + assert r.code == 200 + response = json.loads(r.body.decode()) + assert set(response.keys()) == {"identity", "permissions"} + identity_model = response["identity"] + print(identity_model) + for key, value in expected.items(): + assert identity_model[key] == value + + assert set(identity_model.keys()) == 
set(User.__dataclass_fields__) + + +@pytest.mark.parametrize( + "have_permissions, check_permissions, expected", + [ + ("*", None, {}), + ( + { + "contents": ["read"], + "kernels": ["read", "write"], + "sessions": ["write"], + }, + { + "contents": ["read", "write"], + "kernels": ["read", "write", "execute"], + "terminals": ["execute"], + }, + { + "contents": ["read"], + "kernels": ["read", "write"], + "terminals": [], + }, + ), + ("*", {"contents": ["write"]}, {"contents": ["write"]}), + ], +) +async def test_identity_permissions( + jp_fetch, have_permissions, check_permissions, expected, identity_provider +): + user = MockUser("username") + user.permissions = have_permissions + identity_provider.mock_user = user + + if check_permissions is not None: + params = {"permissions": json.dumps(check_permissions)} + else: + params = None + + r = await jp_fetch("api/me", params=params) + assert r is not None + assert r.code == 200 + response = json.loads(r.body.decode()) + assert set(response.keys()) == {"identity", "permissions"} + assert response["permissions"] == expected + + +@pytest.mark.parametrize( + "permissions", + [ + "", + "[]", + '"abc"', + json.dumps({"resource": "action"}), + json.dumps({"resource": [5]}), + json.dumps({"resource": {}}), + ], +) +async def test_identity_bad_permissions(jp_fetch, permissions): + with pytest.raises(HTTPError) as exc: + await jp_fetch("api/me", params={"permissions": json.dumps(permissions)}) + + r = exc.value.response + assert r is not None + assert r.code == 400 + reply = json.loads(r.body.decode()) + assert "permissions should be a JSON dict" in reply["message"] diff --git a/tests/services/config/test_api.py b/tests/services/config/test_api.py index 9d4a4b2f01..76f0364389 100644 --- a/tests/services/config/test_api.py +++ b/tests/services/config/test_api.py @@ -43,7 +43,7 @@ async def test_get_unknown(jp_fetch): response = await jp_fetch( "api", "config", - "nonexistant", + "nonexistent", method="GET", ) assert response.code 
== 200 diff --git a/tests/services/contents/test_api.py b/tests/services/contents/test_api.py index 988dcdb603..b74ee8f62a 100644 --- a/tests/services/contents/test_api.py +++ b/tests/services/contents/test_api.py @@ -1,19 +1,33 @@ import json import pathlib import sys +import warnings from base64 import decodebytes, encodebytes from unicodedata import normalize +from unittest.mock import patch import pytest import tornado -from nbformat import from_dict, writes +from nbformat import from_dict from nbformat.v4 import new_markdown_cell, new_notebook from jupyter_server.utils import url_path_join +from tests.conftest import dirs from ...utils import expected_http_error +@pytest.fixture(autouse=True) +def suppress_deprecation_warnings(): + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + message="The synchronous ContentsManager", + category=DeprecationWarning, + ) + yield + + def notebooks_only(dir_model): return [nb for nb in dir_model["content"] if nb["type"] == "notebook"] @@ -22,22 +36,6 @@ def dirs_only(dir_model): return [x for x in dir_model["content"] if x["type"] == "directory"] -dirs = [ - ("", "inroot"), - ("Directory with spaces in", "inspace"), - ("unicodé", "innonascii"), - ("foo", "a"), - ("foo", "b"), - ("foo", "name with spaces"), - ("foo", "unicodé"), - ("foo/bar", "baz"), - ("ordering", "A"), - ("ordering", "b"), - ("ordering", "C"), - ("å b", "ç d"), -] - - @pytest.fixture(params=["FileContentsManager", "AsyncFileContentsManager"]) def jp_argv(request): return [ @@ -46,49 +44,6 @@ def jp_argv(request): ] -@pytest.fixture -def contents_dir(tmp_path, jp_serverapp): - return tmp_path / jp_serverapp.root_dir - - -@pytest.fixture -def contents(contents_dir): - # Create files in temporary directory - paths = { - "notebooks": [], - "textfiles": [], - "blobs": [], - } - for d, name in dirs: - p = contents_dir / d - p.mkdir(parents=True, exist_ok=True) - - # Create a notebook - nb = writes(new_notebook(), version=4) - nbname = 
p.joinpath(f"{name}.ipynb") - nbname.write_text(nb, encoding="utf-8") - paths["notebooks"].append(nbname.relative_to(contents_dir)) - - # Create a text file - txt = f"{name} text file" - txtname = p.joinpath(f"{name}.txt") - txtname.write_text(txt, encoding="utf-8") - paths["textfiles"].append(txtname.relative_to(contents_dir)) - - # Create a random blob - blob = name.encode("utf-8") + b"\xFF" - blobname = p.joinpath(f"{name}.blob") - blobname.write_bytes(blob) - paths["blobs"].append(blobname.relative_to(contents_dir)) - paths["all"] = list(paths.values()) - return paths - - -@pytest.fixture -def folders(): - return list({item[0] for item in dirs}) - - @pytest.mark.parametrize("path,name", dirs) async def test_list_notebooks(jp_fetch, contents, path, name): response = await jp_fetch( @@ -127,7 +82,7 @@ async def test_list_nonexistant_dir(jp_fetch, contents): await jp_fetch( "api", "contents", - "nonexistant", + "nonexistent", method="GET", ) @@ -142,11 +97,28 @@ async def test_get_nb_contents(jp_fetch, contents, path, name): assert model["path"] == nbpath assert model["type"] == "notebook" assert "content" in model + assert model["hash"] is None + assert model["hash_algorithm"] is None assert model["format"] == "json" assert "metadata" in model["content"] assert isinstance(model["content"]["metadata"], dict) +@pytest.mark.parametrize("path,name", dirs) +async def test_get_nb_hash(jp_fetch, contents, path, name): + nbname = name + ".ipynb" + nbpath = (path + "/" + nbname).lstrip("/") + r = await jp_fetch("api", "contents", nbpath, method="GET", params=dict(hash="1")) + model = json.loads(r.body.decode()) + assert model["name"] == nbname + assert model["path"] == nbpath + assert model["type"] == "notebook" + assert model["hash"] + assert model["hash_algorithm"] + assert "metadata" in model["content"] + assert isinstance(model["content"]["metadata"], dict) + + @pytest.mark.parametrize("path,name", dirs) async def test_get_nb_no_contents(jp_fetch, contents, path, 
name): nbname = name + ".ipynb" @@ -156,6 +128,9 @@ async def test_get_nb_no_contents(jp_fetch, contents, path, name): assert model["name"] == nbname assert model["path"] == nbpath assert model["type"] == "notebook" + assert "hash" in model + assert model["hash"] == None + assert "hash_algorithm" in model assert "content" in model assert model["content"] is None @@ -206,6 +181,9 @@ async def test_get_text_file_contents(jp_fetch, contents, path, name): model = json.loads(r.body.decode()) assert model["name"] == txtname assert model["path"] == txtpath + assert "hash" in model + assert model["hash"] == None + assert "hash_algorithm" in model assert "content" in model assert model["format"] == "text" assert model["type"] == "file" @@ -231,6 +209,52 @@ async def test_get_text_file_contents(jp_fetch, contents, path, name): assert expected_http_error(e, 400) +@pytest.mark.parametrize("path,name", dirs) +async def test_get_text_file_hash(jp_fetch, contents, path, name): + txtname = name + ".txt" + txtpath = (path + "/" + txtname).lstrip("/") + r = await jp_fetch("api", "contents", txtpath, method="GET", params=dict(hash="1")) + model = json.loads(r.body.decode()) + assert model["name"] == txtname + assert model["path"] == txtpath + assert "hash" in model + assert model["hash"] + assert model["hash_algorithm"] + assert model["format"] == "text" + assert model["type"] == "file" + + +async def test_get_404_hidden(jp_fetch, contents, contents_dir): + # Create text files + hidden_dir = contents_dir / ".hidden" + hidden_dir.mkdir(parents=True, exist_ok=True) + txt = "visible text file in hidden dir" + txtname = hidden_dir.joinpath("visible.txt") + txtname.write_text(txt, encoding="utf-8") + + txt2 = "hidden text file" + txtname2 = contents_dir.joinpath(".hidden.txt") + txtname2.write_text(txt2, encoding="utf-8") + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + ".hidden/visible.txt", + method="GET", + ) + assert 
expected_http_error(e, 404) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + ".hidden.txt", + method="GET", + ) + assert expected_http_error(e, 404) + + @pytest.mark.parametrize("path,name", dirs) async def test_get_binary_file_contents(jp_fetch, contents, path, name): blobname = name + ".blob" @@ -240,6 +264,9 @@ async def test_get_binary_file_contents(jp_fetch, contents, path, name): assert model["name"] == blobname assert model["path"] == blobpath assert "content" in model + assert "hash" in model + assert model["hash"] == None + assert "hash_algorithm" in model assert model["format"] == "base64" assert model["type"] == "file" data_out = decodebytes(model["content"].encode("ascii")) @@ -282,7 +309,7 @@ async def test_get_bad_type(jp_fetch, contents): assert expected_http_error(e, 400, "%s is not a directory" % path) -@pytest.fixture +@pytest.fixture() def _check_created(jp_base_url): def _inner(r, contents_dir, path, name, type="notebook"): fpath = path + "/" + name @@ -317,6 +344,10 @@ async def test_create_untitled(jp_fetch, contents, contents_dir, _check_created) r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})) _check_created(r, str(contents_dir), path, name, type="notebook") + name = "untitled" + r = await jp_fetch("api", "contents", path, method="POST", allow_nonstandard_methods=True) + _check_created(r, str(contents_dir), path, name=name, type="file") + async def test_create_untitled_txt(jp_fetch, contents, contents_dir, _check_created): name = "untitled.txt" @@ -408,6 +439,44 @@ async def test_upload_txt(jp_fetch, contents, contents_dir, _check_created): assert model["content"] == body +async def test_upload_txt_hidden(jp_fetch, contents, contents_dir): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + body = "ünicode téxt" + model = { + "content": body, + "format": "text", + "type": "file", + } + path = ".hidden/Upload tést.txt" + await 
jp_fetch("api", "contents", path, method="PUT", body=json.dumps(model)) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + body = "ünicode téxt" + model = {"content": body, "format": "text", "type": "file", "path": ".hidden/test.txt"} + path = "Upload tést.txt" + await jp_fetch("api", "contents", path, method="PUT", body=json.dumps(model)) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + body = "ünicode téxt" + model = { + "content": body, + "format": "text", + "type": "file", + } + path = ".hidden.txt" + await jp_fetch("api", "contents", path, method="PUT", body=json.dumps(model)) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + body = "ünicode téxt" + model = {"content": body, "format": "text", "type": "file", "path": ".hidden.txt"} + path = "Upload tést.txt" + await jp_fetch("api", "contents", path, method="PUT", body=json.dumps(model)) + assert expected_http_error(e, 400) + + async def test_upload_b64(jp_fetch, contents, contents_dir, _check_created): body = b"\xFFblob" b64body = encodebytes(body).decode("ascii") @@ -465,6 +534,27 @@ async def test_copy(jp_fetch, contents, contents_dir, _check_created): _check_created(r, str(contents_dir), path, copy3, type="notebook") +async def test_copy_dir(jp_fetch, contents, contents_dir, _check_created): + # created a nest copy of a the original folder + dest_dir = "foo" + path = "parent" + response = await jp_fetch( + "api", "contents", path, method="POST", body=json.dumps({"copy_from": dest_dir}) + ) + + _check_created(response, str(contents_dir), path, dest_dir, type="directory") + + # copy to a folder where a similar name exists + dest_dir = "foo" + path = "parent" + copy_dir = f"{dest_dir}-Copy1" + response = await jp_fetch( + "api", "contents", path, method="POST", body=json.dumps({"copy_from": dest_dir}) + ) + + _check_created(response, str(contents_dir), 
path, copy_dir, type="directory") + + async def test_copy_path(jp_fetch, contents, contents_dir, _check_created): path1 = "foo" path2 = "å b" @@ -501,14 +591,107 @@ async def test_copy_put_400(jp_fetch, contents, contents_dir, _check_created): assert expected_http_error(e, 400) -async def test_copy_dir_400(jp_fetch, contents, contents_dir, _check_created): +async def test_copy_put_400_hidden( + jp_fetch, + contents, + contents_dir, +): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + ".hidden/old.txt", + method="PUT", + body=json.dumps({"copy_from": "new.txt"}), + ) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + "old.txt", + method="PUT", + body=json.dumps({"copy_from": ".hidden/new.txt"}), + ) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + ".hidden.txt", + method="PUT", + body=json.dumps({"copy_from": "new.txt"}), + ) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + "old.txt", + method="PUT", + body=json.dumps({"copy_from": ".hidden.txt"}), + ) + assert expected_http_error(e, 400) + + +async def test_copy_400_hidden( + jp_fetch, + contents, + contents_dir, +): + # Create text files + hidden_dir = contents_dir / ".hidden" + hidden_dir.mkdir(parents=True, exist_ok=True) + txt = "visible text file in hidden dir" + txtname = hidden_dir.joinpath("new.txt") + txtname.write_text(txt, encoding="utf-8") + + paths = ["new.txt", ".hidden.txt"] + for name in paths: + txt = f"{name} text file" + txtname = contents_dir.joinpath(f"{name}.txt") + txtname.write_text(txt, encoding="utf-8") + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + ".hidden/old.txt", + method="POST", + 
body=json.dumps({"copy_from": "new.txt"}), + ) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + "old.txt", + method="POST", + body=json.dumps({"copy_from": ".hidden/new.txt"}), + ) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + ".hidden.txt", + method="POST", + body=json.dumps({"copy_from": "new.txt"}), + ) + assert expected_http_error(e, 400) + with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( "api", "contents", - "foo", + "old.txt", method="POST", - body=json.dumps({"copy_from": "å b"}), + body=json.dumps({"copy_from": ".hidden.txt"}), ) assert expected_http_error(e, 400) @@ -528,7 +711,7 @@ async def test_delete(jp_fetch, contents, contents_dir, path, name, _check_creat async def test_delete_dirs(jp_fetch, contents, folders): # Iterate over folders - for name in sorted(folders + ["/"], key=len, reverse=True): + for name in sorted([*folders, "/"], key=len, reverse=True): r = await jp_fetch("api", "contents", name, method="GET") # Get JSON blobs for each content. 
listing = json.loads(r.body.decode())["content"] @@ -541,7 +724,7 @@ async def test_delete_dirs(jp_fetch, contents, folders): assert model["content"] == [] -@pytest.mark.skipif(sys.platform == "win32", reason="Disabled deleting non-empty dirs on Windows") +@pytest.mark.xfail(sys.platform == "win32", reason="Deleting non-empty dirs on Windows") async def test_delete_non_empty_dir(jp_fetch, contents): # Delete a folder await jp_fetch("api", "contents", "å b", method="DELETE") @@ -551,6 +734,24 @@ async def test_delete_non_empty_dir(jp_fetch, contents): assert expected_http_error(e, 404) +async def test_delete_hidden_dir(jp_fetch, contents): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("api", "contents", ".hidden", method="DELETE") + assert expected_http_error(e, 400) + + +async def test_delete_hidden_file(jp_fetch, contents): + # Test deleting file in a hidden directory + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("api", "contents", ".hidden/test.txt", method="DELETE") + assert expected_http_error(e, 400) + + # Test deleting a hidden file + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("api", "contents", ".hidden.txt", method="DELETE") + assert expected_http_error(e, 400) + + async def test_rename(jp_fetch, jp_base_url, contents, contents_dir): path = "foo" name = "a.ipynb" @@ -582,6 +783,60 @@ async def test_rename(jp_fetch, jp_base_url, contents, contents_dir): assert "a.ipynb" not in nbnames +async def test_rename_400_hidden(jp_fetch, jp_base_url, contents, contents_dir): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + old_path = ".hidden/old.txt" + new_path = "new.txt" + # Rename the file + r = await jp_fetch( + "api", + "contents", + old_path, + method="PATCH", + body=json.dumps({"path": new_path}), + ) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + old_path = "old.txt" + new_path = 
".hidden/new.txt" + # Rename the file + r = await jp_fetch( + "api", + "contents", + old_path, + method="PATCH", + body=json.dumps({"path": new_path}), + ) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + old_path = ".hidden.txt" + new_path = "new.txt" + # Rename the file + r = await jp_fetch( + "api", + "contents", + old_path, + method="PATCH", + body=json.dumps({"path": new_path}), + ) + assert expected_http_error(e, 400) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + old_path = "old.txt" + new_path = ".hidden.txt" + # Rename the file + r = await jp_fetch( + "api", + "contents", + old_path, + method="PATCH", + body=json.dumps({"path": new_path}), + ) + assert expected_http_error(e, 400) + + async def test_checkpoints_follow_file(jp_fetch, contents): path = "foo" name = "a.ipynb" @@ -798,3 +1053,73 @@ async def test_trust(jp_fetch, contents): allow_nonstandard_methods=True, ) assert r.code == 201 + + +@patch( + "jupyter_core.paths.is_hidden", + side_effect=AssertionError("Should not call is_hidden if not important"), +) +@patch( + "jupyter_server.services.contents.filemanager.is_hidden", + side_effect=AssertionError("Should not call is_hidden if not important"), +) +async def test_regression_is_hidden(m1, m2, jp_fetch, jp_serverapp, contents, _check_created): + # check that no is_hidden check runs if configured to allow hidden files + contents_dir = contents["contents_dir"] + + hidden_dir = contents_dir / ".hidden" + hidden_dir.mkdir(parents=True, exist_ok=True) + txt = "visible text file in hidden dir" + txtname = hidden_dir.joinpath("visible.txt") + txtname.write_text(txt, encoding="utf-8") + + # Our role here is to check that the side-effect never triggers + jp_serverapp.contents_manager.allow_hidden = True + r = await jp_fetch( + "api", + "contents", + ".hidden", + ) + assert r.code == 200 + + r = await jp_fetch( + "api", + "contents", + ".hidden", + method="POST", + body=json.dumps( + 
{ + "copy_from": ".hidden/visible.txt", + } + ), + ) + _check_created(r, str(contents_dir), ".hidden", "visible-Copy1.txt", type="file") + + r = await jp_fetch( + "api", + "contents", + ".hidden", + "visible-Copy1.txt", + method="DELETE", + ) + assert r.code == 204 + + model = { + "content": "foo", + "format": "text", + "type": "file", + } + r = await jp_fetch( + "api", "contents", ".hidden", "new.txt", method="PUT", body=json.dumps(model) + ) + _check_created(r, str(contents_dir), ".hidden", "new.txt", type="file") + + # sanity check that is actually triggers when flag set to false + jp_serverapp.contents_manager.allow_hidden = False + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch( + "api", + "contents", + ".hidden", + ) + assert expected_http_error(e, 500) diff --git a/tests/services/contents/test_checkpoints.py b/tests/services/contents/test_checkpoints.py new file mode 100644 index 0000000000..e3df0abf5c --- /dev/null +++ b/tests/services/contents/test_checkpoints.py @@ -0,0 +1,130 @@ +import pytest +from jupyter_core.utils import ensure_async +from nbformat import from_dict +from nbformat.v4 import new_markdown_cell + +from jupyter_server.services.contents.filecheckpoints import ( + AsyncFileCheckpoints, + AsyncGenericFileCheckpoints, + FileCheckpoints, + GenericFileCheckpoints, +) +from jupyter_server.services.contents.largefilemanager import ( + AsyncLargeFileManager, + LargeFileManager, +) + +param_pairs = [ + (LargeFileManager, FileCheckpoints), + (LargeFileManager, GenericFileCheckpoints), + (AsyncLargeFileManager, AsyncFileCheckpoints), + (AsyncLargeFileManager, AsyncGenericFileCheckpoints), +] + + +@pytest.fixture(params=param_pairs) +def contents_manager(request, contents): + """Returns a LargeFileManager instance.""" + file_manager, checkpoints_class = request.param + root_dir = str(contents["contents_dir"]) + return file_manager(root_dir=root_dir, checkpoints_class=checkpoints_class) + + +async def 
test_checkpoints_follow_file(contents_manager): + cm: LargeFileManager = contents_manager + path = "foo/a.ipynb" + + # Read initial file. + model = await ensure_async(cm.get(path)) + + # Create a checkpoint of initial state + cp1 = await ensure_async(cm.create_checkpoint(path)) + + # Modify file and save. + nbcontent = model["content"] + nb = from_dict(nbcontent) + hcell = new_markdown_cell("Created by test") + nb.cells.append(hcell) + nbmodel = {"content": nb, "type": "notebook"} + await ensure_async(cm.save(nbmodel, path)) + + # List checkpoints + cps = await ensure_async(cm.list_checkpoints(path)) + assert cps == [cp1] + + model = await ensure_async(cm.get(path)) + nbcontent = model["content"] + nb = from_dict(nbcontent) + assert nb.cells[0].source == "Created by test" + + +async def test_nb_checkpoints(contents_manager): + cm: LargeFileManager = contents_manager + path = "foo/a.ipynb" + model = await ensure_async(cm.get(path)) + cp1 = await ensure_async(cm.create_checkpoint(path)) + assert set(cp1) == {"id", "last_modified"} + + # Modify it. + nbcontent = model["content"] + nb = from_dict(nbcontent) + hcell = new_markdown_cell("Created by test") + nb.cells.append(hcell) + + # Save it. 
+ nbmodel = {"content": nb, "type": "notebook"} + await ensure_async(cm.save(nbmodel, path)) + + # List checkpoints + cps = await ensure_async(cm.list_checkpoints(path)) + assert cps == [cp1] + + nbcontent = await ensure_async(cm.get(path)) + nb = from_dict(nbcontent["content"]) + assert nb.cells[0].source == "Created by test" + + # Restore Checkpoint cp1 + await ensure_async(cm.restore_checkpoint(cp1["id"], path)) + + nbcontent = await ensure_async(cm.get(path)) + nb = from_dict(nbcontent["content"]) + assert nb.cells == [] + + # Delete cp1 + await ensure_async(cm.delete_checkpoint(cp1["id"], path)) + + cps = await ensure_async(cm.list_checkpoints(path)) + assert cps == [] + + +async def test_file_checkpoints(contents_manager): + cm: LargeFileManager = contents_manager + path = "foo/a.txt" + model = await ensure_async(cm.get(path)) + orig_content = model["content"] + + cp1 = await ensure_async(cm.create_checkpoint(path)) + assert set(cp1) == {"id", "last_modified"} + + # Modify and save it. 
+ model["content"] = new_content = orig_content + "\nsecond line" + await ensure_async(cm.save(model, path)) + + # List checkpoints + cps = await ensure_async(cm.list_checkpoints(path)) + assert cps == [cp1] + + model = await ensure_async(cm.get(path)) + assert model["content"] == new_content + + # Restore Checkpoint cp1 + await ensure_async(cm.restore_checkpoint(cp1["id"], path)) + + restored_content = await ensure_async(cm.get(path)) + assert restored_content["content"] == orig_content + + # Delete cp1 + await ensure_async(cm.delete_checkpoint(cp1["id"], path)) + + cps = await ensure_async(cm.list_checkpoints(path)) + assert cps == [] diff --git a/tests/services/contents/test_config.py b/tests/services/contents/test_config.py index 189226b49d..6a7243333c 100644 --- a/tests/services/contents/test_config.py +++ b/tests/services/contents/test_config.py @@ -2,13 +2,13 @@ from jupyter_server.services.contents.checkpoints import AsyncCheckpoints from jupyter_server.services.contents.filecheckpoints import ( + AsyncFileCheckpoints, AsyncGenericFileCheckpoints, - GenericFileCheckpoints, ) from jupyter_server.services.contents.manager import AsyncContentsManager -@pytest.fixture(params=[AsyncGenericFileCheckpoints, GenericFileCheckpoints]) +@pytest.fixture(params=[AsyncGenericFileCheckpoints, AsyncFileCheckpoints]) def jp_server_config(request): return {"FileContentsManager": {"checkpoints_class": request.param}} @@ -44,7 +44,7 @@ def test_pre_post_save_hook_config(jp_serverapp, jp_server_config): assert jp_serverapp.contents_manager.post_save_hook.__name__ == "example_post_save_hook" -async def test_async_contents_manager(jp_configurable_serverapp): +def test_async_contents_manager(jp_configurable_serverapp): config = {"ContentsManager": {"checkpoints_class": AsyncCheckpoints}} argv = [ "--ServerApp.contents_manager_class=jupyter_server.services.contents.manager.AsyncContentsManager" diff --git a/tests/services/contents/test_fileio.py 
b/tests/services/contents/test_fileio.py index 017916ce07..12752ee810 100644 --- a/tests/services/contents/test_fileio.py +++ b/tests/services/contents/test_fileio.py @@ -1,10 +1,21 @@ +import json +import logging import os import stat import sys import pytest - -from jupyter_server.services.contents.fileio import atomic_writing +from nbformat import validate +from nbformat.v4 import new_notebook +from tornado.web import HTTPError + +from jupyter_server.services.contents.fileio import ( + AsyncFileManagerMixin, + FileManagerMixin, + atomic_writing, + path_to_intermediate, + path_to_invalid, +) umask = 0 @@ -30,10 +41,9 @@ class CustomExc(Exception): # OSError: The user lacks the privilege (Windows) have_symlink = False - with pytest.raises(CustomExc): - with atomic_writing(str(f1)) as f: - f.write("Failing write") - raise CustomExc + with pytest.raises(CustomExc), atomic_writing(str(f1)) as f: + f.write("Failing write") + raise CustomExc with open(str(f1)) as f: assert f.read() == "Before" @@ -57,7 +67,7 @@ class CustomExc(Exception): assert f.read() == "written from symlink" -@pytest.fixture +@pytest.fixture() def handle_umask(): global umask umask = os.umask(0) @@ -68,7 +78,6 @@ def handle_umask(): @pytest.mark.skipif(sys.platform.startswith("win"), reason="Windows") def test_atomic_writing_umask(handle_umask, tmp_path): - os.umask(0o022) f1 = str(tmp_path / "1") with atomic_writing(f1) as f: @@ -121,3 +130,88 @@ def test_atomic_writing_newlines(tmp_path): with open(path, newline="") as f: read = f.read() assert read == text + + +def test_path_to_invalid(tmpdir): + assert path_to_invalid(tmpdir) == str(tmpdir) + ".invalid" + + +@pytest.mark.skipif(os.name == "nt", reason="test fails on Windows") +def test_file_manager_mixin(tmp_path): + mixin = FileManagerMixin() + mixin.log = logging.getLogger() + bad_content = tmp_path / "bad_content.ipynb" + bad_content.write_text("{}", "utf8") + # Same as `echo -n {} | sha256sum` + assert 
mixin._get_hash(bad_content.read_bytes()) == { + "hash": "44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a", + "hash_algorithm": "sha256", + } + with pytest.raises(HTTPError): + mixin._read_notebook(bad_content) + other = path_to_intermediate(bad_content) + with open(other, "w") as fid: + json.dump(new_notebook(), fid) + mixin.use_atomic_writing = True + nb = mixin._read_notebook(bad_content) + validate(nb) + + with pytest.raises(HTTPError): + mixin._read_file(tmp_path, "text") + + with pytest.raises(HTTPError): + mixin._save_file(tmp_path / "foo", "foo", "bar") + + +@pytest.mark.skipif(os.name == "nt", reason="test fails on Windows") +async def test_async_file_manager_mixin(tmpdir): + mixin = AsyncFileManagerMixin() + mixin.log = logging.getLogger() + bad_content = tmpdir / "bad_content.ipynb" + bad_content.write_text("{}", "utf8") + with pytest.raises(HTTPError): + await mixin._read_notebook(bad_content) + other = path_to_intermediate(bad_content) + with open(other, "w") as fid: + json.dump(new_notebook(), fid) + mixin.use_atomic_writing = True + nb, bcontent = await mixin._read_notebook(bad_content, raw=True) + # Same as `echo -n {} | sha256sum` + assert mixin._get_hash(bcontent) == { + "hash": "4747f9680816e352a697d0fb69d82334457cdd1e46f053e800859833d3e6003e", + "hash_algorithm": "sha256", + } + validate(nb) + + with pytest.raises(HTTPError): + await mixin._read_file(tmpdir, "text") + + with pytest.raises(HTTPError): + await mixin._save_file(tmpdir / "foo", "foo", "bar") + + +async def test_AsyncFileManagerMixin_read_notebook_no_raw(tmpdir): + mixin = AsyncFileManagerMixin() + mixin.log = logging.getLogger() + bad_content = tmpdir / "bad_content.ipynb" + bad_content.write_text("{}", "utf8") + + other = path_to_intermediate(bad_content) + with open(other, "w") as fid: + json.dump(new_notebook(), fid) + mixin.use_atomic_writing = True + answer = await mixin._read_notebook(bad_content) + + assert not isinstance(answer, tuple) + + +async def 
test_AsyncFileManagerMixin_read_file_no_raw(tmpdir): + mixin = AsyncFileManagerMixin() + mixin.log = logging.getLogger() + file_path = tmpdir / "bad_content.text" + file_path.write_text("blablabla", "utf8") + + mixin.use_atomic_writing = True + answer = await mixin._read_file(file_path, "text") + + assert len(answer) == 2 diff --git a/tests/services/contents/test_largefilemanager.py b/tests/services/contents/test_largefilemanager.py index 82c5e54e78..8af608b46c 100644 --- a/tests/services/contents/test_largefilemanager.py +++ b/tests/services/contents/test_largefilemanager.py @@ -1,11 +1,11 @@ import pytest import tornado +from jupyter_core.utils import ensure_async from jupyter_server.services.contents.largefilemanager import ( AsyncLargeFileManager, LargeFileManager, ) -from jupyter_server.utils import ensure_async from ...utils import expected_http_error diff --git a/tests/services/contents/test_manager.py b/tests/services/contents/test_manager.py index 6765cbbe54..e718036b0b 100644 --- a/tests/services/contents/test_manager.py +++ b/tests/services/contents/test_manager.py @@ -1,4 +1,5 @@ import os +import shutil import sys import time from itertools import combinations @@ -6,6 +7,7 @@ from unittest.mock import patch import pytest +from jupyter_core.utils import ensure_async from nbformat import ValidationError from nbformat import v4 as nbformat from tornado.web import HTTPError @@ -15,7 +17,6 @@ AsyncFileContentsManager, FileContentsManager, ) -from jupyter_server.utils import ensure_async from ...utils import expected_http_error @@ -52,6 +53,39 @@ def _make_dir(jp_contents_manager, api_path): print("Directory already exists: %r" % os_path) +def _make_big_dir(contents_manager, api_path): + # make a directory that is over 100 MB in size + os_path = contents_manager._get_os_path(api_path) + try: + os.makedirs(os_path) + + with open(f"{os_path}/demofile.txt", "a") as textFile: + textFile.write( + """ + Lorem ipsum dolor sit amet, consectetur adipiscing elit, + 
sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. + Ut enim ad minim veniam, quis nostrud exercitation ullamco + laboris nisi ut aliquip ex ea commodo consequat. + Duis aute irure dolor in reprehenderit in voluptate + velit esse cillum dolore eu fugiat nulla pariatur. + Excepteur sint occaecat cupidatat non proident, + sunt in culpa qui officia deserunt mollit anim id est laborum. + """ + ) + + num_sub_folders = contents_manager.max_copy_folder_size_mb * 10 + for i in range(num_sub_folders): + os.makedirs(f"{os_path}/subfolder-{i}") + for j in range(200): + shutil.copy( + f"{os_path}/demofile.txt", + f"{os_path}/subfolder-{i}/testfile{j}.txt", + ) + + except OSError as err: + print("Directory already exists", err) + + def symlink(jp_contents_manager, src, dst): """Make a symlink to src from dst @@ -77,7 +111,7 @@ def add_invalid_cell(notebook): async def prepare_notebook( - jp_contents_manager, make_invalid: Optional[bool] = False + jp_contents_manager: FileContentsManager, make_invalid: Optional[bool] = False ) -> Tuple[Dict, str]: cm = jp_contents_manager model = await ensure_async(cm.new_untitled(type="notebook")) @@ -163,6 +197,9 @@ def test_get_os_path(jp_file_contents_manager_class, tmp_path): fs_path = os.path.join(fm.root_dir, "test.ipynb") assert path == fs_path + +@pytest.mark.skipif(os.name == "nt", reason="Posix only") +def test_get_os_path_posix(jp_file_contents_manager_class, tmp_path): fm = jp_file_contents_manager_class(root_dir=str(tmp_path)) path = fm._get_os_path("////test.ipynb") fs_path = os.path.join(fm.root_dir, "test.ipynb") @@ -243,9 +280,8 @@ async def test_good_symlink(jp_file_contents_manager_class, tmp_path): @pytest.mark.skipif(sys.platform.startswith("win"), reason="Can't test permissions on Windows") async def test_403(jp_file_contents_manager_class, tmp_path): - if hasattr(os, "getuid"): - if os.getuid() == 0: - raise pytest.skip("Can't test permissions as root") + if hasattr(os, "getuid") and os.getuid() == 0: + 
raise pytest.skip("Can't test permissions as root") td = str(tmp_path) cm = jp_file_contents_manager_class(root_dir=td) @@ -260,6 +296,156 @@ async def test_403(jp_file_contents_manager_class, tmp_path): assert e.status_code == 403 +async def test_400(jp_file_contents_manager_class, tmp_path): + # Test Delete behavior + # Test delete of file in hidden directory + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + hidden_dir = ".hidden" + file_in_hidden_path = os.path.join(hidden_dir, "visible.txt") + _make_dir(cm, hidden_dir) + + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.delete_file(file_in_hidden_path)) + assert excinfo.value.status_code == 400 + + # Test delete hidden file in visible directory + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + hidden_dir = "visible" + file_in_hidden_path = os.path.join(hidden_dir, ".hidden.txt") + _make_dir(cm, hidden_dir) + + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.delete_file(file_in_hidden_path)) + assert excinfo.value.status_code == 400 + + # Test Save behavior + # Test save of file in hidden directory + with pytest.raises(HTTPError) as excinfo: + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + hidden_dir = ".hidden" + file_in_hidden_path = os.path.join(hidden_dir, "visible.txt") + _make_dir(cm, hidden_dir) + model = await ensure_async(cm.new(path=file_in_hidden_path)) + os_path = cm._get_os_path(model["path"]) + + try: + result = await ensure_async(cm.save(model, path=os_path)) + except HTTPError as e: + assert e.status_code == 400 + + # Test save hidden file in visible directory + with pytest.raises(HTTPError) as excinfo: + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + hidden_dir = "visible" + file_in_hidden_path = os.path.join(hidden_dir, ".hidden.txt") + _make_dir(cm, hidden_dir) + model = await ensure_async(cm.new(path=file_in_hidden_path)) + os_path = 
cm._get_os_path(model["path"]) + + try: + result = await ensure_async(cm.save(model, path=os_path)) + except HTTPError as e: + assert e.status_code == 400 + + # Test rename behavior + # Test rename with source file in hidden directory + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + hidden_dir = ".hidden" + file_in_hidden_path = os.path.join(hidden_dir, "visible.txt") + _make_dir(cm, hidden_dir) + old_path = file_in_hidden_path + new_path = "new.txt" + + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.rename_file(old_path, new_path)) + assert excinfo.value.status_code == 400 + + # Test rename of dest file in hidden directory + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + hidden_dir = ".hidden" + file_in_hidden_path = os.path.join(hidden_dir, "visible.txt") + _make_dir(cm, hidden_dir) + new_path = file_in_hidden_path + old_path = "old.txt" + + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.rename_file(old_path, new_path)) + assert excinfo.value.status_code == 400 + + # Test rename with hidden source file in visible directory + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + hidden_dir = "visible" + file_in_hidden_path = os.path.join(hidden_dir, ".hidden.txt") + _make_dir(cm, hidden_dir) + old_path = file_in_hidden_path + new_path = "new.txt" + + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.rename_file(old_path, new_path)) + assert excinfo.value.status_code == 400 + + # Test rename with hidden dest file in visible directory + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + hidden_dir = "visible" + file_in_hidden_path = os.path.join(hidden_dir, ".hidden.txt") + _make_dir(cm, hidden_dir) + new_path = file_in_hidden_path + old_path = "old.txt" + + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.rename_file(old_path, new_path)) + assert excinfo.value.status_code == 400 + + +async def 
test_404(jp_file_contents_manager_class, tmp_path): + # setup + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + + # Test visible file in hidden folder + cm.allow_hidden = True + hidden_dir = ".hidden" + file_in_hidden_path = os.path.join(hidden_dir, "visible.txt") + _make_dir(cm, hidden_dir) + model = await ensure_async(cm.new(path=file_in_hidden_path)) + os_path = cm._get_os_path(model["path"]) + cm.allow_hidden = False + + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.get(os_path)) + assert excinfo.value.status_code == 404 + + # Test hidden file in visible folder + cm.allow_hidden = True + hidden_dir = "visible" + file_in_hidden_path = os.path.join(hidden_dir, ".hidden.txt") + _make_dir(cm, hidden_dir) + model = await ensure_async(cm.new(path=file_in_hidden_path)) + os_path = cm._get_os_path(model["path"]) + cm.allow_hidden = False + + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.get(os_path)) + assert excinfo.value.status_code == 404 + + # Test file not found + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + not_a_file = "foo.bar" + + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.get(not_a_file)) + assert excinfo.value.status_code == 404 + + async def test_escape_root(jp_file_contents_manager_class, tmp_path): td = str(tmp_path) cm = jp_file_contents_manager_class(root_dir=td) @@ -271,19 +457,19 @@ async def test_escape_root(jp_file_contents_manager_class, tmp_path): with pytest.raises(HTTPError) as e: await ensure_async(cm.get("..")) - expected_http_error(e, 404) + assert expected_http_error(e, 404) with pytest.raises(HTTPError) as e: await ensure_async(cm.get("foo/../../../bar")) - expected_http_error(e, 404) + assert expected_http_error(e, 404) with pytest.raises(HTTPError) as e: await ensure_async(cm.delete("../foo")) - expected_http_error(e, 404) + assert expected_http_error(e, 404) with pytest.raises(HTTPError) as e: await 
ensure_async(cm.rename("../foo", "../bar")) - expected_http_error(e, 404) + assert expected_http_error(e, 404) with pytest.raises(HTTPError) as e: await ensure_async( @@ -296,7 +482,7 @@ async def test_escape_root(jp_file_contents_manager_class, tmp_path): path="../foo", ) ) - expected_http_error(e, 404) + assert expected_http_error(e, 404) async def test_new_untitled(jp_contents_manager): @@ -385,6 +571,17 @@ async def test_get(jp_contents_manager): nb_as_bin_file = await ensure_async(cm.get(path, content=True, type="file", format="base64")) assert nb_as_bin_file["format"] == "base64" + nb_with_hash = await ensure_async(cm.get(path, require_hash=True)) + assert nb_with_hash["hash"] + assert nb_with_hash["hash_algorithm"] + + # Get the hash without the content + nb_with_hash = await ensure_async(cm.get(path, content=False, require_hash=True)) + assert nb_with_hash["content"] is None + assert nb_with_hash["format"] is None + assert nb_with_hash["hash"] + assert nb_with_hash["hash_algorithm"] + # Test in sub-directory sub_dir = "/foo/" _make_dir(cm, "foo") @@ -399,7 +596,7 @@ async def test_get(jp_contents_manager): # Test with a regular file. 
file_model_path = (await ensure_async(cm.new_untitled(path=sub_dir, ext=".txt")))["path"] - file_model = await ensure_async(cm.get(file_model_path)) + file_model = await ensure_async(cm.get(file_model_path, require_hash=True)) expected_model = { "content": "", "format": "text", @@ -408,12 +605,34 @@ async def test_get(jp_contents_manager): "path": "foo/untitled.txt", "type": "file", "writable": True, + "hash_algorithm": cm.hash_algorithm, + } + # Assert expected model is in file_model + for key, value in expected_model.items(): + assert file_model[key] == value + assert "created" in file_model + assert "last_modified" in file_model + assert file_model["hash"] + + # Get hash without content + file_model = await ensure_async(cm.get(file_model_path, content=False, require_hash=True)) + expected_model = { + "content": None, + "format": None, + "mimetype": "text/plain", + "name": "untitled.txt", + "path": "foo/untitled.txt", + "type": "file", + "writable": True, + "hash_algorithm": cm.hash_algorithm, } + # Assert expected model is in file_model for key, value in expected_model.items(): assert file_model[key] == value assert "created" in file_model assert "last_modified" in file_model + assert file_model["hash"] # Create a sub-sub directory to test getting directory contents with a # subdir. 
@@ -673,6 +892,55 @@ async def test_copy(jp_contents_manager): assert copy3["path"] == "copy 3.ipynb" +async def test_copy_dir(jp_contents_manager): + cm = jp_contents_manager + destDir = "Untitled Folder 1" + sourceDir = "Morningstar Notebooks" + nonExistantDir = "FolderDoesNotExist" + + _make_dir(cm, destDir) + _make_dir(cm, sourceDir) + + nestedDir = f"{destDir}/{sourceDir}" + + # copy one folder insider another folder + copy = await ensure_async(cm.copy(from_path=sourceDir, to_path=destDir)) + assert copy["path"] == nestedDir + + # need to test when copying in a directory where the another folder with the same name exists + _make_dir(cm, nestedDir) + copy = await ensure_async(cm.copy(from_path=sourceDir, to_path=destDir)) + assert copy["path"] == f"{nestedDir}-Copy1" + + # need to test for when copying in the same path as the sourceDir + copy = await ensure_async(cm.copy(from_path=sourceDir, to_path="")) + assert copy["path"] == f"{sourceDir}-Copy1" + + # ensure its still possible to copy a folder to another folder that doesn't exist + copy = await ensure_async( + cm.copy( + from_path=sourceDir, + to_path=nonExistantDir, + ) + ) + assert copy["path"] == f"{nonExistantDir}/{sourceDir}" + + +async def test_copy_big_dir(jp_contents_manager): + # this tests how the Content API limits preventing copying folders that are more than + # the size limit specified in max_copy_folder_size_mb trait + cm = jp_contents_manager + destDir = "Untitled Folder 1" + sourceDir = "Morningstar Notebooks" + cm.max_copy_folder_size_mb = 5 + _make_dir(cm, destDir) + _make_big_dir(contents_manager=cm, api_path=sourceDir) + with pytest.raises(HTTPError) as exc_info: + await ensure_async(cm.copy(from_path=sourceDir, to_path=destDir)) + + assert exc_info.type is HTTPError + + async def test_mark_trusted_cells(jp_contents_manager): cm = jp_contents_manager nb, name, path = await new_notebook(cm) @@ -756,7 +1024,7 @@ async def test_validate_notebook_model(jp_contents_manager): with 
patch("jupyter_server.services.contents.manager.validate_nb") as mock_validate_nb: # Valid notebook and a non-None dictionary, no validate call expected - validation_error = {} + validation_error: dict = {} cm.validate_notebook_model(model, validation_error) assert mock_validate_nb.call_count == 0 mock_validate_nb.reset_mock() @@ -782,3 +1050,43 @@ async def test_validate_notebook_model(jp_contents_manager): cm.validate_notebook_model(model) assert mock_validate_nb.call_count == 1 mock_validate_nb.reset_mock() + + +@patch( + "jupyter_core.paths.is_hidden", + side_effect=AssertionError("Should not call is_hidden if not important"), +) +@patch( + "jupyter_server.services.contents.filemanager.is_hidden", + side_effect=AssertionError("Should not call is_hidden if not important"), +) +async def test_regression_is_hidden(m1, m2, jp_contents_manager): + cm = jp_contents_manager + cm.allow_hidden = True + # Our role here is to check that the side-effect never triggers + dirname = "foo/.hidden_dir" + await make_populated_dir(cm, dirname) + await ensure_async(cm.get(dirname)) + await check_populated_dir_files(cm, dirname) + await ensure_async(cm.get(path="/".join([dirname, "nb.ipynb"]))) + await ensure_async(cm.get(path="/".join([dirname, "file.txt"]))) + await ensure_async(cm.new(path="/".join([dirname, "nb2.ipynb"]))) + await ensure_async(cm.new(path="/".join([dirname, "file2.txt"]))) + await ensure_async(cm.new(path="/".join([dirname, "subdir"]), model={"type": "directory"})) + await ensure_async( + cm.copy( + from_path="/".join([dirname, "file.txt"]), to_path="/".join([dirname, "file-copy.txt"]) + ) + ) + await ensure_async( + cm.rename_file( + old_path="/".join([dirname, "file-copy.txt"]), + new_path="/".join([dirname, "file-renamed.txt"]), + ) + ) + await ensure_async(cm.delete_file(path="/".join([dirname, "file-renamed.txt"]))) + + # sanity check that is actually triggers when flag set to false + cm.allow_hidden = False + with pytest.raises(AssertionError): + await 
ensure_async(cm.get(dirname)) diff --git a/tests/services/contents/test_manager_no_hash.py b/tests/services/contents/test_manager_no_hash.py new file mode 100644 index 0000000000..511a8d319b --- /dev/null +++ b/tests/services/contents/test_manager_no_hash.py @@ -0,0 +1,44 @@ +import json + +import pytest + +from jupyter_server.services.contents.filemanager import ( + AsyncFileContentsManager, +) + + +class NoHashFileManager(AsyncFileContentsManager): + """FileManager prior to 2.11 that introduce the ability to request file hash.""" + + def _base_model(self, path): + """Drop new attributes from model.""" + model = super()._base_model(path) + + del model["hash"] + del model["hash_algorithm"] + + return model + + async def get(self, path, content=True, type=None, format=None): + """Get without the new `require_hash` argument""" + model = await super().get(path, content=content, type=type, format=format) + return model + + +@pytest.fixture +def jp_server_config(jp_server_config): + jp_server_config["ServerApp"]["contents_manager_class"] = NoHashFileManager + return jp_server_config + + +async def test_manager_no_hash_support(tmp_path, jp_root_dir, jp_fetch): + # Create some content + path = "dummy.txt" + (jp_root_dir / path).write_text("blablabla", encoding="utf-8") + + response = await jp_fetch("api", "contents", path, method="GET", params=dict(hash="1")) + + model = json.loads(response.body) + + assert "hash" not in model + assert "hash_algorithm" not in model diff --git a/tests/services/events/__init__.py b/tests/services/events/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/services/events/mock_event.yaml b/tests/services/events/mock_event.yaml new file mode 100644 index 0000000000..dabaa23db5 --- /dev/null +++ b/tests/services/events/mock_event.yaml @@ -0,0 +1,15 @@ +$id: http://event.mock.jupyter.org/message +version: 1 +title: Message +description: | + Emit a message +type: object +properties: + event_message: + title: Event 
Messages + categories: + - unrestricted + description: | + Mock event message to read. +required: + - event_message diff --git a/tests/services/events/mockextension/__init__.py b/tests/services/events/mockextension/__init__.py new file mode 100644 index 0000000000..ed7c0e9d37 --- /dev/null +++ b/tests/services/events/mockextension/__init__.py @@ -0,0 +1,10 @@ +from .mock_extension import _load_jupyter_server_extension + +# Function that makes these extensions discoverable +# by the test functions. + + +def _jupyter_server_extension_points(): + return [ + {"module": "tests.services.events.mockextension"}, + ] diff --git a/tests/services/events/mockextension/mock_extension.py b/tests/services/events/mockextension/mock_extension.py new file mode 100644 index 0000000000..6b9f5afc4a --- /dev/null +++ b/tests/services/events/mockextension/mock_extension.py @@ -0,0 +1,22 @@ +import pathlib + +from jupyter_server.base.handlers import JupyterHandler +from jupyter_server.utils import url_path_join + + +class MockEventHandler(JupyterHandler): + def get(self): + # Emit an event. 
+ self.event_logger.emit( + schema_id="http://event.mockextension.jupyter.org/message", + data={"event_message": "Hello world, from mock extension!"}, + ) + + +def _load_jupyter_server_extension(serverapp): + # Register a schema with the EventBus + schema_file = pathlib.Path(__file__).parent / "mock_extension_event.yaml" + serverapp.event_logger.register_event_schema(schema_file) + serverapp.web_app.add_handlers( + ".*$", [(url_path_join(serverapp.base_url, "/mock/event"), MockEventHandler)] + ) diff --git a/tests/services/events/mockextension/mock_extension_event.yaml b/tests/services/events/mockextension/mock_extension_event.yaml new file mode 100644 index 0000000000..b7c03d1a48 --- /dev/null +++ b/tests/services/events/mockextension/mock_extension_event.yaml @@ -0,0 +1,15 @@ +$id: http://event.mockextension.jupyter.org/message +version: 1 +title: Message +description: | + Emit a message +type: object +properties: + event_message: + title: Event Message + categories: + - unrestricted + description: | + Mock event message to read. +required: + - event_message diff --git a/tests/services/events/test_api.py b/tests/services/events/test_api.py new file mode 100644 index 0000000000..d84b112240 --- /dev/null +++ b/tests/services/events/test_api.py @@ -0,0 +1,155 @@ +import io +import json +import logging +import pathlib + +import pytest +import tornado + +from tests.utils import expected_http_error + + +@pytest.fixture() +def event_logger_sink(jp_serverapp): + event_logger = jp_serverapp.event_logger + # Register the event schema defined in this directory. 
+ schema_file = pathlib.Path(__file__).parent / "mock_event.yaml" + event_logger.register_event_schema(schema_file) + sink = io.StringIO() + handler = logging.StreamHandler(sink) + event_logger.register_handler(handler) + return event_logger, sink + + +@pytest.fixture() +def event_logger(event_logger_sink): + event_logger, sink = event_logger_sink + return event_logger + + +async def test_subscribe_websocket(event_logger, jp_ws_fetch): + ws = await jp_ws_fetch("/api/events/subscribe") + + event_logger.emit( + schema_id="http://event.mock.jupyter.org/message", + data={"event_message": "Hello, world!"}, + ) + # await event_logger.gather_listeners() + message = await ws.read_message() + event_data = json.loads(message) + ws.close() + + assert event_data.get("event_message") == "Hello, world!" + + +payload_1 = """\ +{ + "schema_id": "http://event.mock.jupyter.org/message", + "version": 1, + "data": { + "event_message": "Hello, world!" + }, + "timestamp": "2022-05-26T12:50:00+06:00Z" +} +""" + +payload_2 = """\ +{ + "schema_id": "http://event.mock.jupyter.org/message", + "version": 1, + "data": { + "event_message": "Hello, world!" + } +} +""" + + +@pytest.mark.parametrize("payload", [payload_1, payload_2]) +async def test_post_event(jp_fetch, event_logger_sink, payload): + event_logger, sink = event_logger_sink + + r = await jp_fetch("api", "events", method="POST", body=payload) + assert r.code == 204 + + output = sink.getvalue() + assert output + input = json.loads(payload) + data = json.loads(output) + assert input["data"]["event_message"] == data["event_message"] + assert data["__timestamp__"] + if "timestamp" in input: + assert input["timestamp"] == data["__timestamp__"] + + +payload_3 = """\ +{ + "schema_id": "http://event.mock.jupyter.org/message", + "data": { + "event_message": "Hello, world!" + } +} +""" + +payload_4 = """\ +{ + "version": 1, + "data": { + "event_message": "Hello, world!" 
+ } +} +""" + +payload_5 = """\ +{ + "schema_id": "http://event.mock.jupyter.org/message", + "version": 1 +} +""" + +payload_6 = """\ +{ + "schema_id": "event.mock.jupyter.org/message", + "version": 1, + "data": { + "event_message": "Hello, world!" + }, + "timestamp": "2022-05-26 12:50:00" +} +""" + + +@pytest.mark.parametrize("payload", [payload_3, payload_4, payload_5, payload_6]) +async def test_post_event_400(jp_fetch, event_logger, payload): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("api", "events", method="POST", body=payload) + + assert expected_http_error(e, 400) + + +payload_7 = """\ +{ + "schema_id": "http://event.mock.jupyter.org/message", + "version": 1, + "data": { + "message": "Hello, world!" + } +} +""" + +payload_8 = """\ +{ + "schema_id": "http://event.mock.jupyter.org/message", + "version": 2, + "data": { + "message": "Hello, world!" + } +} +""" + + +@pytest.mark.parametrize("payload", [payload_7, payload_8]) +async def test_post_event_500(jp_fetch, event_logger, payload): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await jp_fetch("api", "events", method="POST", body=payload) + + assert expected_http_error(e, 500) diff --git a/tests/services/events/test_extension.py b/tests/services/events/test_extension.py new file mode 100644 index 0000000000..c20e6f79b6 --- /dev/null +++ b/tests/services/events/test_extension.py @@ -0,0 +1,32 @@ +import json + +import pytest + + +@pytest.fixture() +def jp_server_config(): + config = { + "ServerApp": { + "jpserver_extensions": {"tests.services.events.mockextension": True}, + }, + "EventBus": {"allowed_schemas": ["http://event.mockextension.jupyter.org/message"]}, + } + return config + + +async def test_subscribe_websocket(jp_ws_fetch, jp_fetch): + # Open an event listener websocket + ws = await jp_ws_fetch("/api/events/subscribe") + + # Hit the extension endpoint that emits an event + await jp_fetch("/mock/event") + + # Check the event listener for a 
message + message = await ws.read_message() + event_data = json.loads(message) + + # Close websocket + ws.close() + + # Verify that an event message was received. + assert event_data.get("event_message") == "Hello world, from mock extension!" diff --git a/tests/services/kernels/test_api.py b/tests/services/kernels/test_api.py index bb91a588e4..c1b98e7269 100644 --- a/tests/services/kernels/test_api.py +++ b/tests/services/kernels/test_api.py @@ -1,10 +1,13 @@ +import asyncio import json import os import time +import warnings import jupyter_client import pytest import tornado +from flaky import flaky from jupyter_client.kernelspec import NATIVE_KERNEL_NAME from tornado.httpclient import HTTPClientError @@ -15,19 +18,37 @@ TEST_TIMEOUT = 60 -@pytest.fixture +@pytest.fixture(autouse=True) +def suppress_deprecation_warnings(): + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + message="The synchronous MappingKernelManager", + category=DeprecationWarning, + ) + yield + + +@pytest.fixture() def pending_kernel_is_ready(jp_serverapp): - async def _(kernel_id): + async def _(kernel_id, ready=None): km = jp_serverapp.kernel_manager if getattr(km, "use_pending_kernels", False): kernel = km.get_kernel(kernel_id) if getattr(kernel, "ready", None): - await kernel.ready + new_ready = kernel.ready + # Make sure we get a new ready promise (for a restart) + while new_ready == ready: + await asyncio.sleep(0.1) + if not isinstance(new_ready, asyncio.Future): + new_ready = asyncio.wrap_future(new_ready) + await new_ready + return new_ready return _ -configs = [ +configs: list = [ { "ServerApp": { "kernel_manager_class": "jupyter_server.services.kernels.kernelmanager.MappingKernelManager" @@ -67,7 +88,7 @@ async def test_no_kernels(jp_fetch): @pytest.mark.timeout(TEST_TIMEOUT) -async def test_default_kernels(jp_fetch, jp_base_url, jp_cleanup_subprocesses): +async def test_default_kernels(jp_fetch, jp_base_url): r = await jp_fetch("api", "kernels", method="POST", 
allow_nonstandard_methods=True) kernel = json.loads(r.body.decode()) assert r.headers["location"] == url_path_join(jp_base_url, "/api/kernels/", kernel["id"]) @@ -79,13 +100,10 @@ async def test_default_kernels(jp_fetch, jp_base_url, jp_cleanup_subprocesses): ["frame-ancestors 'self'", "report-uri " + report_uri, "default-src 'none'"] ) assert r.headers["Content-Security-Policy"] == expected_csp - await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_main_kernel_handler( - jp_fetch, jp_base_url, jp_cleanup_subprocesses, jp_serverapp, pending_kernel_is_ready -): +async def test_main_kernel_handler(jp_fetch, jp_base_url, jp_serverapp, pending_kernel_is_ready): # Start the first kernel r = await jp_fetch( "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) @@ -108,6 +126,7 @@ async def test_main_kernel_handler( assert isinstance(kernel_list, list) assert kernel_list[0]["id"] == kernel1["id"] assert kernel_list[0]["name"] == kernel1["name"] + await pending_kernel_is_ready(kernel1["id"]) # Start a second kernel r = await jp_fetch( @@ -115,6 +134,7 @@ async def test_main_kernel_handler( ) kernel2 = json.loads(r.body.decode()) assert isinstance(kernel2, dict) + await pending_kernel_is_ready(kernel1["id"]) # Get kernel list again r = await jp_fetch("api", "kernels", method="GET") @@ -136,7 +156,7 @@ async def test_main_kernel_handler( assert r.code == 204 # Restart a kernel - await pending_kernel_is_ready(kernel2["id"]) + ready = await pending_kernel_is_ready(kernel2["id"]) r = await jp_fetch( "api", "kernels", @@ -148,6 +168,9 @@ async def test_main_kernel_handler( restarted_kernel = json.loads(r.body.decode()) assert restarted_kernel["id"] == kernel2["id"] assert restarted_kernel["name"] == kernel2["name"] + # Make sure we get a new ready promise + if ready: + await pending_kernel_is_ready(kernel2["id"], ready) # Start a kernel with a path r = await jp_fetch( @@ -158,11 +181,11 @@ async def test_main_kernel_handler( 
) kernel3 = json.loads(r.body.decode()) assert isinstance(kernel3, dict) - await jp_cleanup_subprocesses() + await pending_kernel_is_ready(kernel3["id"]) @pytest.mark.timeout(TEST_TIMEOUT) -async def test_kernel_handler(jp_fetch, jp_cleanup_subprocesses, pending_kernel_is_ready): +async def test_kernel_handler(jp_fetch, jp_serverapp, pending_kernel_is_ready): # Create a kernel r = await jp_fetch( "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) @@ -182,7 +205,7 @@ async def test_kernel_handler(jp_fetch, jp_cleanup_subprocesses, pending_kernel_ assert expected_http_error(e, 404) # Delete kernel with id. - await pending_kernel_is_ready(kernel_id) + ready = await pending_kernel_is_ready(kernel_id) r = await jp_fetch( "api", "kernels", @@ -193,7 +216,7 @@ async def test_kernel_handler(jp_fetch, jp_cleanup_subprocesses, pending_kernel_ # Get list of kernels try: - await pending_kernel_is_ready(kernel_id) + await pending_kernel_is_ready(kernel_id, ready) # If the kernel is already deleted, no need to await. 
except tornado.web.HTTPError: pass @@ -206,13 +229,10 @@ async def test_kernel_handler(jp_fetch, jp_cleanup_subprocesses, pending_kernel_ with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch("api", "kernels", bad_id, method="DELETE") assert expected_http_error(e, 404, "Kernel does not exist: " + bad_id) - await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_kernel_handler_startup_error( - jp_fetch, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs -): +async def test_kernel_handler_startup_error(jp_fetch, jp_serverapp, jp_kernelspecs): if getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False): return @@ -223,7 +243,7 @@ async def test_kernel_handler_startup_error( @pytest.mark.timeout(TEST_TIMEOUT) async def test_kernel_handler_startup_error_pending( - jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs + jp_fetch, jp_ws_fetch, jp_serverapp, jp_kernelspecs ): if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False): return @@ -237,10 +257,9 @@ async def test_kernel_handler_startup_error_pending( await jp_ws_fetch("api", "kernels", kid, "channels") +@flaky @pytest.mark.timeout(TEST_TIMEOUT) -async def test_connection( - jp_fetch, jp_ws_fetch, jp_http_port, jp_auth_header, jp_cleanup_subprocesses -): +async def test_connection(jp_fetch, jp_ws_fetch, jp_http_port, jp_auth_header): # Create kernel r = await jp_fetch( "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) @@ -274,4 +293,3 @@ async def test_connection( r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) assert model["connections"] == 0 - await jp_cleanup_subprocesses() diff --git a/tests/services/kernels/test_config.py b/tests/services/kernels/test_config.py index 9b58a8c283..8f779bb1dd 100644 --- a/tests/services/kernels/test_config.py +++ b/tests/services/kernels/test_config.py @@ -4,7 +4,7 @@ from 
jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager -@pytest.fixture +@pytest.fixture() def jp_server_config(): return Config( {"ServerApp": {"MappingKernelManager": {"allowed_message_types": ["kernel_info_request"]}}} @@ -15,9 +15,17 @@ def test_config(jp_serverapp): assert jp_serverapp.kernel_manager.allowed_message_types == ["kernel_info_request"] -async def test_async_kernel_manager(jp_configurable_serverapp): +def test_async_kernel_manager(jp_configurable_serverapp): argv = [ "--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager.AsyncMappingKernelManager" ] app = jp_configurable_serverapp(argv=argv) assert isinstance(app.kernel_manager, AsyncMappingKernelManager) + + +def test_not_server_kernel_manager(jp_configurable_serverapp): + argv = [ + "--AsyncMappingKernelManager.kernel_manager_class=jupyter_client.ioloop.manager.AsyncIOLoopKernelManager" + ] + with pytest.warns(FutureWarning, match="is not a subclass of 'ServerKernelManager'"): + jp_configurable_serverapp(argv=argv) diff --git a/tests/services/kernels/test_connection.py b/tests/services/kernels/test_connection.py new file mode 100644 index 0000000000..3316105222 --- /dev/null +++ b/tests/services/kernels/test_connection.py @@ -0,0 +1,45 @@ +import asyncio +import json +from unittest.mock import MagicMock + +from jupyter_client.jsonutil import json_clean, json_default +from jupyter_client.session import Session +from tornado.httpserver import HTTPRequest + +from jupyter_server.serverapp import ServerApp +from jupyter_server.services.kernels.connection.channels import ZMQChannelsWebsocketConnection +from jupyter_server.services.kernels.websocket import KernelWebsocketHandler + + +async def test_websocket_connection(jp_serverapp: ServerApp) -> None: + app = jp_serverapp + kernel_id = await app.kernel_manager.start_kernel() # type:ignore[has-type] + kernel = app.kernel_manager.get_kernel(kernel_id) + request = HTTPRequest("foo", "GET") + 
request.connection = MagicMock() + handler = KernelWebsocketHandler(app.web_app, request) + handler.ws_connection = MagicMock() + handler.ws_connection.is_closing = lambda: False + conn = ZMQChannelsWebsocketConnection(parent=kernel, websocket_handler=handler) + handler.connection = conn + await conn.prepare() + conn.connect() + await asyncio.wrap_future(conn.nudge()) + session: Session = kernel.session + msg = session.msg("data_pub", content={"a": "b"}) + data = json.dumps( + json_clean(msg), + default=json_default, + ensure_ascii=False, + allow_nan=False, + ) + conn.handle_incoming_message(data) + conn.handle_outgoing_message("iopub", session.serialize(msg)) + assert ( + conn.websocket_handler.select_subprotocol(["v1.kernel.websocket.jupyter.org"]) + == "v1.kernel.websocket.jupyter.org" + ) + conn.write_stderr("test", {}) + conn.on_kernel_restarted() + conn.on_restart_failed() + conn._on_error("shell", msg, session.serialize(msg)) diff --git a/tests/services/kernels/test_cull.py b/tests/services/kernels/test_cull.py index 97f1a57bbe..f370f9c5ef 100644 --- a/tests/services/kernels/test_cull.py +++ b/tests/services/kernels/test_cull.py @@ -2,6 +2,7 @@ import json import os import platform +import warnings import jupyter_client import pytest @@ -11,6 +12,38 @@ CULL_TIMEOUT = 30 if platform.python_implementation() == "PyPy" else 5 CULL_INTERVAL = 1 +sample_kernel_json_with_metadata = { + "argv": ["cat", "{connection_file}"], + "display_name": "Test kernel", + "metadata": {"cull_idle_timeout": 0}, +} + + +@pytest.fixture(autouse=True) +def suppress_deprecation_warnings(): + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + message="The synchronous MappingKernelManager", + category=DeprecationWarning, + ) + yield + + +@pytest.fixture() +def jp_kernelspec_with_metadata(jp_data_dir): + """Configures some sample kernelspecs in the Jupyter data directory.""" + kenrel_spec_name = "sample_with_metadata" + sample_kernel_dir = 
jp_data_dir.joinpath("kernels", kenrel_spec_name) + sample_kernel_dir.mkdir(parents=True) + # Create kernel json file + sample_kernel_file = sample_kernel_dir.joinpath("kernel.json") + kernel_json = sample_kernel_json_with_metadata.copy() + sample_kernel_file.write_text(json.dumps(kernel_json)) + # Create resources text + sample_kernel_resources = sample_kernel_dir.joinpath("resource.txt") + sample_kernel_resources.write_text("resource") + @pytest.mark.parametrize( "jp_server_config", @@ -43,7 +76,7 @@ ), ], ) -async def test_cull_idle(jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses): +async def test_cull_idle(jp_fetch, jp_ws_fetch): r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True) kernel = json.loads(r.body.decode()) kid = kernel["id"] @@ -59,7 +92,24 @@ async def test_cull_idle(jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses): ws.close() culled = await get_cull_status(kid, jp_fetch) # not connected, should be culled assert culled - await jp_cleanup_subprocesses() + + +async def test_cull_idle_disable(jp_fetch, jp_ws_fetch, jp_kernelspec_with_metadata): + r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True) + kernel = json.loads(r.body.decode()) + kid = kernel["id"] + + # Open a websocket connection. 
+ ws = await jp_ws_fetch("api", "kernels", kid, "channels") + + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + assert model["connections"] == 1 + culled = await get_cull_status(kid, jp_fetch) # connected, should not be culled + assert not culled + ws.close() + culled = await get_cull_status(kid, jp_fetch) # not connected, should not be culled + assert not culled # Pending kernels was released in Jupyter Client 7.1 @@ -89,9 +139,7 @@ async def test_cull_idle(jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses): ], ) @pytest.mark.timeout(30) -async def test_cull_dead( - jp_fetch, jp_ws_fetch, jp_serverapp, jp_cleanup_subprocesses, jp_kernelspecs -): +async def test_cull_dead(jp_fetch, jp_ws_fetch, jp_serverapp, jp_kernelspecs): r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True) kernel = json.loads(r.body.decode()) kid = kernel["id"] @@ -105,7 +153,6 @@ async def test_cull_dead( assert model["connections"] == 0 culled = await get_cull_status(kid, jp_fetch) # connected, should not be culled assert culled - await jp_cleanup_subprocesses() async def get_cull_status(kid, jp_fetch): diff --git a/tests/services/kernels/test_events.py b/tests/services/kernels/test_events.py new file mode 100644 index 0000000000..9bdfb03672 --- /dev/null +++ b/tests/services/kernels/test_events.py @@ -0,0 +1,77 @@ +import pytest +from jupyter_client.manager import AsyncKernelManager +from tornado import web + +from jupyter_server.services.kernels.kernelmanager import ServerKernelManager + +pytest_plugins = ["jupyter_events.pytest_plugin"] + + +@pytest.mark.parametrize("action", ["start", "restart", "interrupt", "shutdown"]) +async def test_kernel_action_success_event( + monkeypatch, action, jp_read_emitted_events, jp_event_handler +): + manager = ServerKernelManager() + manager.event_logger.register_handler(jp_event_handler) + + async def mock_method(self, *args, **kwargs): + self.kernel_id = "x-x-x-x-x" + + 
monkeypatch.setattr(AsyncKernelManager, f"{action}_kernel", mock_method) + + await getattr(manager, f"{action}_kernel")() + + output = jp_read_emitted_events()[0] + assert "action" in output and output["action"] == action + assert "msg" in output + assert "kernel_id" in output + assert "status" in output and output["status"] == "success" + + +@pytest.mark.parametrize("action", ["start", "restart", "interrupt", "shutdown"]) +async def test_kernel_action_failed_event( + monkeypatch, action, jp_read_emitted_events, jp_event_handler +): + manager = ServerKernelManager() + manager.event_logger.register_handler(jp_event_handler) + + async def mock_method(self, *args, **kwargs): + self.kernel_id = "x-x-x-x-x" + raise Exception + + monkeypatch.setattr(AsyncKernelManager, f"{action}_kernel", mock_method) + + with pytest.raises(Exception): # noqa: B017 + await getattr(manager, f"{action}_kernel")() + + output = jp_read_emitted_events()[0] + assert "action" in output and output["action"] == action + assert "msg" in output + assert "kernel_id" in output + assert "status" in output and output["status"] == "error" + + +@pytest.mark.parametrize("action", ["start", "restart", "interrupt", "shutdown"]) +async def test_kernel_action_http_error_event( + monkeypatch, action, jp_read_emitted_events, jp_event_handler +): + manager = ServerKernelManager() + manager.event_logger.register_handler(jp_event_handler) + + log_message = "This http request failed." 
+ + async def mock_method(self, *args, **kwargs): + self.kernel_id = "x-x-x-x-x" + raise web.HTTPError(status_code=500, log_message=log_message) + + monkeypatch.setattr(AsyncKernelManager, f"{action}_kernel", mock_method) + + with pytest.raises(web.HTTPError): + await getattr(manager, f"{action}_kernel")() + + output = jp_read_emitted_events()[0] + assert "action" in output and output["action"] == action + assert "msg" in output and output["msg"] == log_message + assert "kernel_id" in output + assert "status" in output and output["status"] == "error" + assert "status_code" in output and output["status_code"] == 500 diff --git a/tests/services/kernelspecs/test_api.py b/tests/services/kernelspecs/test_api.py index ee14d6afb0..2a0765d593 100644 --- a/tests/services/kernelspecs/test_api.py +++ b/tests/services/kernelspecs/test_api.py @@ -1,13 +1,16 @@ import json import pytest -import tornado -from jupyter_client.kernelspec import NATIVE_KERNEL_NAME +from tornado.httpclient import HTTPClientError + +from jupyter_server.serverapp import ServerApp from ...utils import expected_http_error, some_resource -async def test_list_kernelspecs_bad(jp_fetch, jp_kernelspecs, jp_data_dir): +async def test_list_kernelspecs_bad(jp_fetch, jp_kernelspecs, jp_data_dir, jp_serverapp): + app: ServerApp = jp_serverapp + default = app.kernel_manager.default_kernel_name bad_kernel_dir = jp_data_dir.joinpath(jp_data_dir, "kernels", "bad2") bad_kernel_dir.mkdir(parents=True) bad_kernel_json = bad_kernel_dir.joinpath("kernel.json") @@ -16,17 +19,19 @@ async def test_list_kernelspecs_bad(jp_fetch, jp_kernelspecs, jp_data_dir): r = await jp_fetch("api", "kernelspecs", method="GET") model = json.loads(r.body.decode()) assert isinstance(model, dict) - assert model["default"] == NATIVE_KERNEL_NAME + assert model["default"] == default specs = model["kernelspecs"] assert isinstance(specs, dict) assert len(specs) > 2 -async def test_list_kernelspecs(jp_fetch, jp_kernelspecs): +async def 
test_list_kernelspecs(jp_fetch, jp_kernelspecs, jp_serverapp): + app: ServerApp = jp_serverapp + default = app.kernel_manager.default_kernel_name r = await jp_fetch("api", "kernelspecs", method="GET") model = json.loads(r.body.decode()) assert isinstance(model, dict) - assert model["default"] == NATIVE_KERNEL_NAME + assert model["default"] == default specs = model["kernelspecs"] assert isinstance(specs, dict) assert len(specs) > 2 @@ -35,7 +40,7 @@ def is_sample_kernelspec(s): return s["name"] == "sample" and s["spec"]["display_name"] == "Test kernel" def is_default_kernelspec(s): - return s["name"] == NATIVE_KERNEL_NAME and s["spec"]["display_name"].startswith("Python") + return s["name"] == default assert any(is_sample_kernelspec(s) for s in specs.values()), specs assert any(is_default_kernelspec(s) for s in specs.values()), specs @@ -51,8 +56,8 @@ async def test_get_kernelspecs(jp_fetch, jp_kernelspecs): async def test_get_nonexistant_kernelspec(jp_fetch, jp_kernelspecs): - with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await jp_fetch("api", "kernelspecs", "nonexistant", method="GET") + with pytest.raises(HTTPClientError) as e: + await jp_fetch("api", "kernelspecs", "nonexistent", method="GET") assert expected_http_error(e, 404) @@ -63,10 +68,10 @@ async def test_get_kernel_resource_file(jp_fetch, jp_kernelspecs): async def test_get_nonexistant_resource(jp_fetch, jp_kernelspecs): - with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await jp_fetch("kernelspecs", "nonexistant", "resource.txt", method="GET") + with pytest.raises(HTTPClientError) as e: + await jp_fetch("kernelspecs", "nonexistent", "resource.txt", method="GET") assert expected_http_error(e, 404) - with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await jp_fetch("kernelspecs", "sample", "nonexistant.txt", method="GET") + with pytest.raises(HTTPClientError) as e: + await jp_fetch("kernelspecs", "sample", "nonexistent.txt", method="GET") assert 
expected_http_error(e, 404) diff --git a/tests/services/sessions/test_api.py b/tests/services/sessions/test_api.py index 19c165cbb1..a4aa0a73e5 100644 --- a/tests/services/sessions/test_api.py +++ b/tests/services/sessions/test_api.py @@ -1,11 +1,15 @@ +import asyncio import json import os import shutil import time +import warnings +from typing import Any import jupyter_client import pytest import tornado +from flaky import flaky from jupyter_client.ioloop import AsyncIOLoopKernelManager from nbformat import writes from nbformat.v4 import new_notebook @@ -17,7 +21,18 @@ from ...utils import expected_http_error -TEST_TIMEOUT = 60 +TEST_TIMEOUT = 10 + + +@pytest.fixture(autouse=True) +def suppress_deprecation_warnings(): + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + message="The synchronous MappingKernelManager", + category=DeprecationWarning, + ) + yield def j(r): @@ -29,7 +44,9 @@ class NewPortsKernelManager(AsyncIOLoopKernelManager): def _default_cache_ports(self) -> bool: return False - async def restart_kernel(self, now: bool = False, newports: bool = True, **kw) -> None: + async def restart_kernel( # type:ignore[override] + self, now: bool = False, newports: bool = True, **kw: Any + ) -> None: self.log.debug(f"DEBUG**** calling super().restart_kernel with newports={newports}") return await super().restart_kernel(now=now, newports=newports, **kw) @@ -41,7 +58,7 @@ def _default_kernel_manager_class(self): return "tests.services.sessions.test_api.NewPortsKernelManager" -configs = [ +configs: list = [ { "ServerApp": { "kernel_manager_class": "jupyter_server.services.kernels.kernelmanager.MappingKernelManager" @@ -65,7 +82,7 @@ def _default_kernel_manager_class(self): # See https://github.com/jupyter-server/jupyter_server/issues/672 if os.name != "nt" and jupyter_client._version.version_info >= (7, 1): # Add a pending kernels condition - c = { + c: dict = { "ServerApp": { "kernel_manager_class": 
"tests.services.sessions.test_api.NewPortsMappingKernelManager" }, @@ -147,7 +164,7 @@ async def cleanup(self): time.sleep(0.1) -@pytest.fixture +@pytest.fixture() def session_is_ready(jp_serverapp): """Wait for the kernel started by a session to be ready. @@ -162,12 +179,15 @@ async def _(session_id): kernel_id = session["kernel"]["id"] kernel = mkm.get_kernel(kernel_id) if getattr(kernel, "ready", None): - await kernel.ready + ready = kernel.ready + if not isinstance(ready, asyncio.Future): + ready = asyncio.wrap_future(ready) + await ready return _ -@pytest.fixture +@pytest.fixture() def session_client(jp_root_dir, jp_fetch): subdir = jp_root_dir.joinpath("foo") subdir.mkdir() @@ -211,7 +231,7 @@ def assert_session_equality(actual, expected): @pytest.mark.timeout(TEST_TIMEOUT) -async def test_create(session_client, jp_base_url, jp_cleanup_subprocesses, jp_serverapp): +async def test_create(session_client, jp_base_url, jp_serverapp): # Make sure no sessions exist. resp = await session_client.list() sessions = j(resp) @@ -251,14 +271,9 @@ async def test_create(session_client, jp_base_url, jp_cleanup_subprocesses, jp_s got = j(resp) assert_session_equality(got, new_session) - # Need to find a better solution to this. - await jp_cleanup_subprocesses() - @pytest.mark.timeout(TEST_TIMEOUT) -async def test_create_bad( - session_client, jp_base_url, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs -): +async def test_create_bad(session_client, jp_base_url, jp_serverapp, jp_kernelspecs): if getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False): return @@ -272,16 +287,12 @@ async def test_create_bad( with pytest.raises(HTTPClientError): await session_client.create("foo/nb1.ipynb") - # Need to find a better solution to this. 
- await jp_cleanup_subprocesses() - @pytest.mark.timeout(TEST_TIMEOUT) async def test_create_bad_pending( session_client, jp_base_url, jp_ws_fetch, - jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs, ): @@ -310,14 +321,9 @@ async def test_create_bad_pending( if os.name != "nt": assert "non_existent_path" in session["kernel"]["reason"] - # Need to find a better solution to this. - await jp_cleanup_subprocesses() - @pytest.mark.timeout(TEST_TIMEOUT) -async def test_create_file_session( - session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready -): +async def test_create_file_session(session_client, jp_serverapp, session_is_ready): resp = await session_client.create("foo/nb1.py", type="file") assert resp.code == 201 newsession = j(resp) @@ -325,41 +331,31 @@ async def test_create_file_session( assert newsession["type"] == "file" sid = newsession["id"] await session_is_ready(sid) - await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_create_console_session( - session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready -): +async def test_create_console_session(session_client, jp_serverapp, session_is_ready): resp = await session_client.create("foo/abc123", type="console") assert resp.code == 201 newsession = j(resp) assert newsession["path"] == "foo/abc123" assert newsession["type"] == "console" - # Need to find a better solution to this. sid = newsession["id"] await session_is_ready(sid) - await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_create_deprecated(session_client, jp_cleanup_subprocesses, jp_serverapp): +async def test_create_deprecated(session_client, jp_serverapp): resp = await session_client.create_deprecated("foo/nb1.ipynb") assert resp.code == 201 newsession = j(resp) assert newsession["path"] == "foo/nb1.ipynb" assert newsession["type"] == "notebook" assert newsession["notebook"]["path"] == "foo/nb1.ipynb" - # Need to find a better solution to this. 
- sid = newsession["id"] - await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_create_with_kernel_id( - session_client, jp_fetch, jp_base_url, jp_cleanup_subprocesses, jp_serverapp -): +async def test_create_with_kernel_id(session_client, jp_fetch, jp_base_url, jp_serverapp): # create a new kernel resp = await jp_fetch("api/kernels", method="POST", allow_nonstandard_methods=True) kernel = j(resp) @@ -384,14 +380,10 @@ async def test_create_with_kernel_id( resp = await session_client.get(sid) got = j(resp) assert_session_equality(got, new_session) - # Need to find a better solution to this. - await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_create_with_bad_kernel_id( - session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready -): +async def test_create_with_bad_kernel_id(session_client, jp_serverapp, session_is_ready): resp = await session_client.create("foo/nb1.py", type="file") assert resp.code == 201 newsession = j(resp) @@ -401,11 +393,10 @@ async def test_create_with_bad_kernel_id( # TODO assert newsession["path"] == "foo/nb1.py" assert newsession["type"] == "file" - await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_delete(session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready): +async def test_delete(session_client, jp_serverapp, session_is_ready): resp = await session_client.create("foo/nb1.ipynb") newsession = j(resp) @@ -422,12 +413,10 @@ async def test_delete(session_client, jp_cleanup_subprocesses, jp_serverapp, ses with pytest.raises(tornado.httpclient.HTTPClientError) as e: await session_client.get(sid) assert expected_http_error(e, 404) - # Need to find a better solution to this. 
- await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_modify_path(session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready): +async def test_modify_path(session_client, jp_serverapp, session_is_ready): resp = await session_client.create("foo/nb1.ipynb") newsession = j(resp) sid = newsession["id"] @@ -437,14 +426,10 @@ async def test_modify_path(session_client, jp_cleanup_subprocesses, jp_serverapp changed = j(resp) assert changed["id"] == sid assert changed["path"] == "nb2.ipynb" - # Need to find a better solution to this. - await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_modify_path_deprecated( - session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready -): +async def test_modify_path_deprecated(session_client, jp_serverapp, session_is_ready): resp = await session_client.create("foo/nb1.ipynb") newsession = j(resp) sid = newsession["id"] @@ -454,12 +439,10 @@ async def test_modify_path_deprecated( changed = j(resp) assert changed["id"] == sid assert changed["notebook"]["path"] == "nb2.ipynb" - # Need to find a better solution to this. - await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_modify_type(session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready): +async def test_modify_type(session_client, jp_serverapp, session_is_ready): resp = await session_client.create("foo/nb1.ipynb") newsession = j(resp) sid = newsession["id"] @@ -469,14 +452,10 @@ async def test_modify_type(session_client, jp_cleanup_subprocesses, jp_serverapp changed = j(resp) assert changed["id"] == sid assert changed["type"] == "console" - # Need to find a better solution to this. 
- await jp_cleanup_subprocesses() @pytest.mark.timeout(TEST_TIMEOUT) -async def test_modify_kernel_name( - session_client, jp_fetch, jp_cleanup_subprocesses, jp_serverapp, session_is_ready -): +async def test_modify_kernel_name(session_client, jp_fetch, jp_serverapp, session_is_ready): resp = await session_client.create("foo/nb1.ipynb") before = j(resp) sid = before["id"] @@ -497,14 +476,9 @@ async def test_modify_kernel_name( if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False): assert kernel_list == [after["kernel"]] - # Need to find a better solution to this. - await jp_cleanup_subprocesses() - @pytest.mark.timeout(TEST_TIMEOUT) -async def test_modify_kernel_id( - session_client, jp_fetch, jp_cleanup_subprocesses, jp_serverapp, session_is_ready -): +async def test_modify_kernel_id(session_client, jp_fetch, jp_serverapp, session_is_ready): resp = await session_client.create("foo/nb1.ipynb") before = j(resp) sid = before["id"] @@ -532,14 +506,10 @@ async def test_modify_kernel_id( if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False): assert kernel_list == [kernel] - # Need to find a better solution to this. - await jp_cleanup_subprocesses() - +@flaky @pytest.mark.timeout(TEST_TIMEOUT) -async def test_restart_kernel( - session_client, jp_base_url, jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses, session_is_ready -): +async def test_restart_kernel(session_client, jp_base_url, jp_fetch, jp_ws_fetch, session_is_ready): # Create a session. resp = await session_client.create("foo/nb1.ipynb") assert resp.code == 201 @@ -590,12 +560,16 @@ async def test_restart_kernel( model = json.loads(r.body.decode()) assert model["connections"] == 0 - # Open a websocket connection. - await jp_ws_fetch("api", "kernels", kid, "channels") + # Open a new websocket connection. 
+ ws2 = await jp_ws_fetch("api", "kernels", kid, "channels") - r = await jp_fetch("api", "kernels", kid, method="GET") - model = json.loads(r.body.decode()) - assert model["connections"] == 1 + # give it some time to close on the other side: + for _ in range(10): + r = await jp_fetch("api", "kernels", kid, method="GET") + model = json.loads(r.body.decode()) + if model["connections"] == 0: + time.sleep(0.1) + else: + break - # Need to find a better solution to this. - await jp_cleanup_subprocesses() + ws2.close() diff --git a/tests/services/sessions/test_manager.py b/tests/services/sessions/test_manager.py index a67dd6398e..9af04f2268 100644 --- a/tests/services/sessions/test_manager.py +++ b/tests/services/sessions/test_manager.py @@ -1,4 +1,5 @@ import asyncio +from datetime import datetime import pytest from tornado import web @@ -8,6 +9,7 @@ from jupyter_server.services.contents.manager import ContentsManager from jupyter_server.services.kernels.kernelmanager import MappingKernelManager from jupyter_server.services.sessions.sessionmanager import ( + KernelName, KernelSessionRecord, KernelSessionRecordConflict, KernelSessionRecordList, @@ -16,9 +18,15 @@ class DummyKernel: + execution_state: str + last_activity: datetime + def __init__(self, kernel_name="python"): self.kernel_name = kernel_name + def update_env(self, *args, **kwargs): + pass + dummy_date = utcnow() dummy_date_s = isoformat(dummy_date) @@ -34,7 +42,7 @@ def __init__(self, *args, **kwargs): def _new_id(self): return next(self.id_letters) - async def start_kernel(self, kernel_id=None, path=None, kernel_name="python", **kwargs): + async def start_kernel(self, *, kernel_id=None, path=None, kernel_name="python", **kwargs): kernel_id = kernel_id or self._new_id() k = self._kernels[kernel_id] = DummyKernel(kernel_name=kernel_name) self._kernel_connections[kernel_id] = 0 @@ -47,7 +55,7 @@ async def shutdown_kernel(self, kernel_id, now=False): class SlowStartingKernelsMKM(MockMKM): - async def 
start_kernel(self, kernel_id=None, path=None, kernel_name="python", **kwargs): + async def start_kernel(self, *, kernel_id=None, path=None, kernel_name="python", **kwargs): await asyncio.sleep(1.0) return await super().start_kernel( kernel_id=kernel_id, path=path, kernel_name=kernel_name, **kwargs @@ -58,7 +66,7 @@ async def shutdown_kernel(self, kernel_id, now=False): await super().shutdown_kernel(kernel_id, now=now) -@pytest.fixture +@pytest.fixture() def session_manager(): return SessionManager(kernel_manager=MockMKM(), contents_manager=ContentsManager()) @@ -132,7 +140,7 @@ def test_kernel_record_list(): # Test .get() r_ = records.get(r) assert r == r_ - r_ = records.get(r.kernel_id) + r_ = records.get(r.kernel_id or "") assert r == r_ with pytest.raises(ValueError): @@ -416,7 +424,7 @@ async def test_good_database_filepath(jp_runtime_dir): ) await session_manager.create_session( - path="/path/to/test.ipynb", kernel_name="python", type="notebook" + path="/path/to/test.ipynb", kernel_name=KernelName("python"), type="notebook" ) # Assert that the database file exists assert empty_file.exists() @@ -448,7 +456,7 @@ async def test_session_persistence(jp_runtime_dir): ) session = await session_manager.create_session( - path="/path/to/test.ipynb", kernel_name="python", type="notebook" + path="/path/to/test.ipynb", kernel_name=KernelName("python"), type="notebook" ) # Assert that the database file exists @@ -479,7 +487,7 @@ async def test_pending_kernel(): ) # Create a session with a slow starting kernel fut = session_manager.create_session( - path="/path/to/test.ipynb", kernel_name="python", type="notebook" + path="/path/to/test.ipynb", kernel_name=KernelName("python"), type="notebook" ) task = asyncio.create_task(fut) await asyncio.sleep(0.1) @@ -503,10 +511,10 @@ async def test_pending_kernel(): # Test multiple, parallel pending kernels fut1 = session_manager.create_session( - path="/path/to/test.ipynb", kernel_name="python", type="notebook" + 
path="/path/to/test.ipynb", kernel_name=KernelName("python"), type="notebook" ) fut2 = session_manager.create_session( - path="/path/to/test.ipynb", kernel_name="python", type="notebook" + path="/path/to/test.ipynb", kernel_name=KernelName("python"), type="notebook" ) task1 = asyncio.create_task(fut1) await asyncio.sleep(0.1) diff --git a/tests/test_files.py b/tests/test_files.py index 7fac8419d4..9f6105090e 100644 --- a/tests/test_files.py +++ b/tests/test_files.py @@ -1,15 +1,26 @@ import json import os from pathlib import Path +from unittest.mock import patch import pytest -import tornado from nbformat import writes from nbformat.v4 import new_code_cell, new_markdown_cell, new_notebook, new_output +from tornado.httpclient import HTTPClientError from .utils import expected_http_error +@pytest.fixture( + params=[ + "jupyter_server.files.handlers.FilesHandler", + "jupyter_server.base.handlers.AuthenticatedFileHandler", + ] +) +def jp_argv(request): + return ["--ContentsManager.files_handler_class=" + request.param] + + @pytest.fixture( params=[ [False, ["å b"]], @@ -27,10 +38,23 @@ async def fetch_expect_200(jp_fetch, *path_parts): assert r.body.decode() == path_parts[-1], (path_parts, r.body) -async def fetch_expect_404(jp_fetch, *path_parts): - with pytest.raises(tornado.httpclient.HTTPClientError) as e: +async def fetch_expect_error(jp_fetch, code, *path_parts): + with pytest.raises(HTTPClientError) as e: await jp_fetch("files", *path_parts, method="GET") - assert expected_http_error(e, 404), [path_parts, e] + assert expected_http_error(e, code), [path_parts, e] + + +async def fetch_expect_404(jp_fetch, *path_parts): + return await fetch_expect_error(jp_fetch, 404, *path_parts) + + +async def test_file_types(jp_fetch, jp_root_dir): + path = Path(jp_root_dir, "test") + path.mkdir(parents=True, exist_ok=True) + foos = ["foo.tar.gz", "foo.bz", "foo.foo"] + for foo in foos: + (path / foo).write_text(foo) + await fetch_expect_200(jp_fetch, "test", foo) async def 
test_hidden_files(jp_fetch, jp_serverapp, jp_root_dir, maybe_hidden): @@ -55,6 +79,36 @@ async def test_hidden_files(jp_fetch, jp_serverapp, jp_root_dir, maybe_hidden): await fetch_expect_200(jp_fetch, *path_parts, foo) +@patch( + "jupyter_core.paths.is_hidden", + side_effect=AssertionError("Should not call is_hidden if not important"), +) +@patch( + "jupyter_server.services.contents.filemanager.is_hidden", + side_effect=AssertionError("Should not call is_hidden if not important"), +) +@patch( + "jupyter_server.base.handlers.is_hidden", + side_effect=AssertionError("Should not call is_hidden if not important"), +) +async def test_regression_is_hidden(m1, m2, m3, jp_fetch, jp_serverapp, jp_root_dir): + path_parts = [".hidden", "foo"] + path = Path(jp_root_dir, *path_parts) + path.mkdir(parents=True, exist_ok=True) + + foos = ["foo", ".foo"] + for foo in foos: + (path / foo).write_text(foo) + + jp_serverapp.contents_manager.allow_hidden = True + for foo in foos: + await fetch_expect_200(jp_fetch, *path_parts, foo) + + jp_serverapp.contents_manager.allow_hidden = False + for foo in foos: + await fetch_expect_error(jp_fetch, 500, *path_parts, foo) + + async def test_contents_manager(jp_fetch, jp_serverapp, jp_root_dir): """make sure ContentsManager returns right files (ipynb, bin, txt). 
Also test save file hooks.""" @@ -85,7 +139,7 @@ async def test_contents_manager(jp_fetch, jp_serverapp, jp_root_dir): r = await jp_fetch("files/test.txt", method="GET") assert r.code == 200 - assert r.headers["content-type"] == "text/plain; charset=UTF-8" + assert "text/plain" in r.headers["content-type"] assert r.body.decode() == "foobar" diff --git a/tests/test_gateway.py b/tests/test_gateway.py index d040999558..585650e2f0 100644 --- a/tests/test_gateway.py +++ b/tests/test_gateway.py @@ -1,21 +1,37 @@ """Test GatewayClient""" +import asyncio import json +import logging import os import uuid -from datetime import datetime -from io import StringIO -from unittest.mock import patch +from datetime import datetime, timedelta, timezone +from email.utils import format_datetime +from http.cookies import SimpleCookie +from io import BytesIO +from queue import Empty +from typing import Any, Dict, Union +from unittest.mock import MagicMock, patch import pytest import tornado +from jupyter_core.utils import ensure_async +from tornado.concurrent import Future from tornado.httpclient import HTTPRequest, HTTPResponse +from tornado.httputil import HTTPServerRequest +from tornado.queues import Queue from tornado.web import HTTPError +from traitlets import Int, Unicode +from traitlets.config import Config -from jupyter_server.gateway.managers import GatewayClient -from jupyter_server.utils import ensure_async +from jupyter_server.gateway.connections import GatewayWebSocketConnection +from jupyter_server.gateway.gateway_client import GatewayTokenRenewerBase, NoOpTokenRenewer +from jupyter_server.gateway.managers import ChannelQueue, GatewayClient, GatewayKernelManager +from jupyter_server.services.kernels.websocket import KernelWebsocketHandler from .utils import expected_http_error +pytest_plugins = ["jupyter_events.pytest_plugin"] + def generate_kernelspec(name): argv_stanza = ["python", "-m", "ipykernel_launcher", "-f", "{connection_file}"] @@ -29,12 +45,19 @@ def 
generate_kernelspec(name): "metadata": {}, } } - kernelspec_stanza = {"name": name, "spec": spec_stanza, "resources": {}} + kernelspec_stanza = { + "name": name, + "spec": spec_stanza, + "resources": { + "logo-64x64": f"f/kernelspecs/{name}/logo-64x64.png", + "url": "https://example.com/example-url", + }, + } return kernelspec_stanza # We'll mock up two kernelspecs - kspec_foo and kspec_bar -kernelspecs = { +kernelspecs: dict = { "default": "kspec_foo", "kernelspecs": { "kspec_foo": generate_kernelspec("kspec_foo"), @@ -46,10 +69,16 @@ def generate_kernelspec(name): # maintain a dictionary of expected running kernels. Key = kernel_id, Value = model. running_kernels = {} +# Dictionary of kernels to transiently omit from list results. +# +# This is used to simulate inconsistency in list results from the Gateway server +# due to issues like race conditions, bugs, etc. +omitted_kernels: Dict[str, bool] = {} + def generate_model(name): """Generate a mocked kernel model. Caller is responsible for adding model to running_kernels dictionary.""" - dt = datetime.utcnow().isoformat() + "Z" + dt = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z") kernel_id = str(uuid.uuid4()) model = { "id": kernel_id, @@ -72,21 +101,28 @@ async def mock_gateway_request(url, **kwargs): # Fetch all kernelspecs if endpoint.endswith("/api/kernelspecs") and method == "GET": - response_buf = StringIO(json.dumps(kernelspecs)) + response_buf = BytesIO(json.dumps(kernelspecs).encode("utf-8")) response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response # Fetch named kernelspec if endpoint.rfind("/api/kernelspecs/") >= 0 and method == "GET": requested_kernelspec = endpoint.rpartition("/")[2] - kspecs = kernelspecs.get("kernelspecs") + kspecs: dict = kernelspecs["kernelspecs"] if requested_kernelspec in kspecs: - response_buf = StringIO(json.dumps(kspecs.get(requested_kernelspec))) + response_str = json.dumps(kspecs.get(requested_kernelspec)) + response_buf 
= BytesIO(response_str.encode("utf-8")) response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response else: raise HTTPError(404, message="Kernelspec does not exist: %s" % requested_kernelspec) + # Fetch kernelspec asset + if endpoint.rfind("/kernelspecs/") >= 0 and method == "GET": + response_buf = BytesIO(b"foo") + response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) + return response + # Create kernel if endpoint.endswith("/api/kernels") and method == "POST": json_body = json.loads(kwargs["body"]) @@ -94,19 +130,26 @@ async def mock_gateway_request(url, **kwargs): env = json_body.get("env") kspec_name = env.get("KERNEL_KSPEC_NAME") assert name == kspec_name # Ensure that KERNEL_ env values get propagated + # Verify env propagation is well-behaved... + assert "FOO" in env + assert "BAR" in env + assert "BAZ" not in env model = generate_model(name) running_kernels[model.get("id")] = model # Register model as a running kernel - response_buf = StringIO(json.dumps(model)) + response_buf = BytesIO(json.dumps(model).encode("utf-8")) response = await ensure_async(HTTPResponse(request, 201, buffer=response_buf)) return response # Fetch list of running kernels if endpoint.endswith("/api/kernels") and method == "GET": kernels = [] - for kernel_id in running_kernels.keys(): - model = running_kernels.get(kernel_id) - kernels.append(model) - response_buf = StringIO(json.dumps(kernels)) + for kernel_id in running_kernels: + if kernel_id in omitted_kernels: + omitted_kernels.pop(kernel_id) + else: + model = running_kernels.get(kernel_id) + kernels.append(model) + response_buf = BytesIO(json.dumps(kernels).encode("utf-8")) response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response @@ -122,7 +165,8 @@ async def mock_gateway_request(url, **kwargs): raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id) elif action == "restart": if requested_kernel_id in 
running_kernels: - response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id))) + response_str = json.dumps(running_kernels.get(requested_kernel_id)) + response_buf = BytesIO(response_str.encode("utf-8")) response = await ensure_async(HTTPResponse(request, 204, buffer=response_buf)) return response else: @@ -133,6 +177,9 @@ async def mock_gateway_request(url, **kwargs): # Shutdown existing kernel if endpoint.rfind("/api/kernels/") >= 0 and method == "DELETE": requested_kernel_id = endpoint.rpartition("/")[2] + if requested_kernel_id not in running_kernels: + raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id) + running_kernels.pop( requested_kernel_id ) # Simulate shutdown by removing kernel from running set @@ -143,7 +190,8 @@ async def mock_gateway_request(url, **kwargs): if endpoint.rfind("/api/kernels/") >= 0 and method == "GET": requested_kernel_id = endpoint.rpartition("/")[2] if requested_kernel_id in running_kernels: - response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id))) + response_str = json.dumps(running_kernels.get(requested_kernel_id)) + response_buf = BytesIO(response_str.encode("utf-8")) response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response else: @@ -155,7 +203,38 @@ async def mock_gateway_request(url, **kwargs): mock_http_user = "alice" -@pytest.fixture +def mock_websocket_create_connection(recv_side_effect=None): + def helper(*args, **kwargs): + mock = MagicMock() + mock.recv = MagicMock(side_effect=recv_side_effect) + return mock + + return helper + + +class CustomTestTokenRenewer(GatewayTokenRenewerBase): # type:ignore[misc] + TEST_EXPECTED_TOKEN_VALUE = "Use this token value: 42" + + # The following are configured by the config test to ensure they flow + # configured to: 42 + config_var_1: int = Int(config=True) # type:ignore[assignment] + # configured to: "Use this token value: " + config_var_2: str = Unicode(config=True) # 
type:ignore[assignment] + + def get_token( + self, auth_header_key: str, auth_scheme: Union[str, None], auth_token: str, **kwargs: Any + ) -> str: + return f"{self.config_var_2}{self.config_var_1}" + + +@pytest.fixture() +def jp_server_config(): + return Config( + {"CustomTestTokenRenewer": {"config_var_1": 42, "config_var_2": "Use this token value: "}} + ) + + +@pytest.fixture() def init_gateway(monkeypatch): """Initializes the server for use as a gateway client.""" # Clear the singleton first since previous tests may not have used a gateway. @@ -164,6 +243,12 @@ def init_gateway(monkeypatch): monkeypatch.setenv("JUPYTER_GATEWAY_HTTP_USER", mock_http_user) monkeypatch.setenv("JUPYTER_GATEWAY_REQUEST_TIMEOUT", "44.4") monkeypatch.setenv("JUPYTER_GATEWAY_CONNECT_TIMEOUT", "44.4") + monkeypatch.setenv("JUPYTER_GATEWAY_LAUNCH_TIMEOUT_PAD", "1.1") + monkeypatch.setenv("JUPYTER_GATEWAY_ACCEPT_COOKIES", "false") + monkeypatch.setenv("JUPYTER_GATEWAY_ENV_WHITELIST", "FOO,BAR") + monkeypatch.setenv("FOO", "foo") + monkeypatch.setenv("BAR", "bar") + monkeypatch.setenv("BAZ", "baz") yield GatewayClient.clear_instance() @@ -176,19 +261,22 @@ async def test_gateway_env_options(init_gateway, jp_serverapp): jp_serverapp.gateway_config.connect_timeout == jp_serverapp.gateway_config.request_timeout ) assert jp_serverapp.gateway_config.connect_timeout == 44.4 + assert jp_serverapp.gateway_config.launch_timeout_pad == 1.1 + assert jp_serverapp.gateway_config.accept_cookies is False + assert jp_serverapp.gateway_config.allowed_envs == "FOO,BAR" - GatewayClient.instance().init_static_args() - assert GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == int( - jp_serverapp.gateway_config.request_timeout - ) + GatewayClient.instance().init_connection_args() + assert GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == 43 -async def test_gateway_cli_options(jp_configurable_serverapp): +def test_gateway_cli_options(jp_configurable_serverapp, capsys): argv = [ "--gateway-url=" + mock_gateway_url, 
"--GatewayClient.http_user=" + mock_http_user, "--GatewayClient.connect_timeout=44.4", "--GatewayClient.request_timeout=96.0", + "--GatewayClient.launch_timeout_pad=5.1", + "--GatewayClient.env_whitelist=FOO,BAR", ] GatewayClient.clear_instance() @@ -199,10 +287,136 @@ async def test_gateway_cli_options(jp_configurable_serverapp): assert app.gateway_config.http_user == mock_http_user assert app.gateway_config.connect_timeout == 44.4 assert app.gateway_config.request_timeout == 96.0 - GatewayClient.instance().init_static_args() + assert app.gateway_config.launch_timeout_pad == 5.1 + assert app.gateway_config.gateway_token_renewer_class == NoOpTokenRenewer + assert app.gateway_config.allowed_envs == "FOO,BAR" + captured = capsys.readouterr() assert ( - GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == 96 - ) # Ensure KLT gets set from request-timeout + "env_whitelist is deprecated in jupyter_server 2.0, use GatewayClient.allowed_envs" + in captured.err + ) + gw_client = GatewayClient.instance() + gw_client.init_connection_args() + assert ( + gw_client.KERNEL_LAUNCH_TIMEOUT == 90 + ) # Ensure KLT gets set from request-timeout - launch_timeout_pad + GatewayClient.clear_instance() + + +@pytest.mark.parametrize( + "renewer_type,initial_auth_token", [("default", ""), ("custom", None), ("custom", "")] +) +def test_token_renewer_config( + jp_server_config, jp_configurable_serverapp, renewer_type, initial_auth_token +): + argv = ["--gateway-url=" + mock_gateway_url] + if renewer_type == "custom": + argv.append( + "--GatewayClient.gateway_token_renewer_class=tests.test_gateway.CustomTestTokenRenewer" + ) + if initial_auth_token is None: + argv.append("--GatewayClient.auth_token=None") + + GatewayClient.clear_instance() + app = jp_configurable_serverapp(argv=argv) + + assert app.gateway_config.gateway_enabled is True + assert app.gateway_config.url == mock_gateway_url + gw_client = GatewayClient.instance() + gw_client.init_connection_args() + assert 
isinstance(gw_client.gateway_token_renewer, GatewayTokenRenewerBase) + if renewer_type == "default": + assert isinstance(gw_client.gateway_token_renewer, NoOpTokenRenewer) + token = gw_client.gateway_token_renewer.get_token( + gw_client.auth_header_key, gw_client.auth_scheme, gw_client.auth_token or "" + ) + assert token == gw_client.auth_token + else: + assert isinstance(gw_client.gateway_token_renewer, CustomTestTokenRenewer) + token = gw_client.gateway_token_renewer.get_token( + gw_client.auth_header_key, gw_client.auth_scheme, gw_client.auth_token or "" + ) + assert token == CustomTestTokenRenewer.TEST_EXPECTED_TOKEN_VALUE + gw_client.load_connection_args() + if renewer_type == "default" or initial_auth_token is None: + assert gw_client.auth_token == initial_auth_token + else: + assert gw_client.auth_token == CustomTestTokenRenewer.TEST_EXPECTED_TOKEN_VALUE + + +@pytest.mark.parametrize( + "request_timeout,kernel_launch_timeout,expected_request_timeout,expected_kernel_launch_timeout", + [(50, 10, 50, 45), (10, 50, 55, 50)], +) +def test_gateway_request_timeout_pad_option( + jp_configurable_serverapp, + monkeypatch, + request_timeout, + kernel_launch_timeout, + expected_request_timeout, + expected_kernel_launch_timeout, +): + argv = [ + f"--GatewayClient.request_timeout={request_timeout}", + "--GatewayClient.launch_timeout_pad=5", + ] + + GatewayClient.clear_instance() + app = jp_configurable_serverapp(argv=argv) + + monkeypatch.setattr(GatewayClient, "KERNEL_LAUNCH_TIMEOUT", kernel_launch_timeout) + GatewayClient.instance().init_connection_args() + + assert app.gateway_config.request_timeout == expected_request_timeout + assert expected_kernel_launch_timeout == GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT + + GatewayClient.clear_instance() + + +cookie_expire_time = format_datetime(datetime.now(tz=timezone.utc) + timedelta(seconds=180)) + + +@pytest.mark.parametrize( + "accept_cookies,expire_arg,expire_param,existing_cookies,cookie_exists", + [ + (False, None, 
None, "EXISTING=1", False), + (True, None, None, "EXISTING=1", True), + (True, "Expires", cookie_expire_time, None, True), + (True, "Max-Age", "-360", "EXISTING=1", False), + ], +) +def test_gateway_request_with_expiring_cookies( + jp_configurable_serverapp, + accept_cookies, + expire_arg, + expire_param, + existing_cookies, + cookie_exists, +): + argv = [f"--GatewayClient.accept_cookies={accept_cookies}"] + + GatewayClient.clear_instance() + _ = jp_configurable_serverapp(argv=argv) + + cookie: SimpleCookie = SimpleCookie() + cookie.load("SERVERID=1234567; Path=/") + if expire_arg: + cookie["SERVERID"][expire_arg] = expire_param + + GatewayClient.instance().update_cookies(cookie) + + args = {} + if existing_cookies: + args["headers"] = {"Cookie": existing_cookies} + connection_args = GatewayClient.instance().load_connection_args(**args) + + if not cookie_exists: + assert "SERVERID" not in (connection_args["headers"].get("Cookie") or "") + else: + assert "SERVERID" in connection_args["headers"].get("Cookie") + if existing_cookies: + assert "EXISTING" in connection_args["headers"].get("Cookie") + GatewayClient.clear_instance() @@ -213,7 +427,7 @@ async def test_gateway_class_mappings(init_gateway, jp_serverapp): assert jp_serverapp.kernel_spec_manager_class.__name__ == "GatewayKernelSpecManager" -async def test_gateway_get_kernelspecs(init_gateway, jp_fetch): +async def test_gateway_get_kernelspecs(init_gateway, jp_fetch, jp_serverapp): # Validate that kernelspecs come from gateway. 
with mocked_gateway: r = await jp_fetch("api", "kernelspecs", method="GET") @@ -222,6 +436,9 @@ async def test_gateway_get_kernelspecs(init_gateway, jp_fetch): kspecs = content.get("kernelspecs") assert len(kspecs) == 2 assert kspecs.get("kspec_bar").get("name") == "kspec_bar" + assert ( + kspecs.get("kspec_bar").get("resources")["logo-64x64"].startswith(jp_serverapp.base_url) + ) async def test_gateway_get_named_kernelspec(init_gateway, jp_fetch): @@ -232,72 +449,309 @@ async def test_gateway_get_named_kernelspec(init_gateway, jp_fetch): kspec_foo = json.loads(r.body.decode("utf-8")) assert kspec_foo.get("name") == "kspec_foo" + r = await jp_fetch("kernelspecs", "kspec_foo", "logo-64x64.png", method="GET") + assert r.code == 200 + assert r.body == b"foo" + assert r.headers["content-type"] == "image/png" + with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch("api", "kernelspecs", "no_such_spec", method="GET") assert expected_http_error(e, 404) -async def test_gateway_session_lifecycle(init_gateway, jp_root_dir, jp_fetch): +@pytest.mark.parametrize("cull_kernel", [False, True]) +async def test_gateway_session_lifecycle(init_gateway, jp_root_dir, jp_fetch, cull_kernel): # Validate session lifecycle functions; create and delete. 
# create - session_id, kernel_id = await create_session(jp_root_dir, jp_fetch, "kspec_foo") + session_id, kernel_id = await create_session(jp_fetch, "kspec_foo") # ensure kernel still considered running - assert await is_kernel_running(jp_fetch, kernel_id) is True + assert await is_session_active(jp_fetch, session_id) is True # interrupt await interrupt_kernel(jp_fetch, kernel_id) # ensure kernel still considered running - assert await is_kernel_running(jp_fetch, kernel_id) is True + assert await is_session_active(jp_fetch, session_id) is True # restart await restart_kernel(jp_fetch, kernel_id) - # ensure kernel still considered running - assert await is_kernel_running(jp_fetch, kernel_id) is True + assert await is_session_active(jp_fetch, session_id) is True - # delete - await delete_session(jp_fetch, session_id) - assert await is_kernel_running(jp_fetch, kernel_id) is False + omitted_kernels[kernel_id] = True + if cull_kernel: + running_kernels.pop(kernel_id) + # fetch kernel and session and ensure not considered running + assert await is_kernel_running(jp_fetch, kernel_id) is not cull_kernel + assert await is_session_active(jp_fetch, session_id) is not cull_kernel -async def test_gateway_kernel_lifecycle(init_gateway, jp_fetch): + # delete session. If culled, ensure 404 is raised + if cull_kernel: + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await delete_session(jp_fetch, session_id) + assert expected_http_error(e, 404) + else: + await delete_session(jp_fetch, session_id) + + assert await is_session_active(jp_fetch, session_id) is False + + +@pytest.mark.parametrize("cull_kernel", [False, True]) +async def test_gateway_kernel_lifecycle( + init_gateway, + jp_configurable_serverapp, + jp_read_emitted_events, + jp_event_handler, + jp_ws_fetch, + jp_fetch, + cull_kernel, +): # Validate kernel lifecycle functions; create, interrupt, restart and delete. 
+ app = jp_configurable_serverapp() + app.event_logger.register_handler(jp_event_handler) + # create kernel_id = await create_kernel(jp_fetch, "kspec_bar") + output = jp_read_emitted_events()[0] + assert "action" in output and output["action"] == "start" + assert "msg" in output + assert "kernel_id" in output and kernel_id == output["kernel_id"] + assert "status" in output and output["status"] == "success" + # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True + ws = await jp_ws_fetch("api", "kernels", kernel_id, "channels") + ws.ping() + ws.write_message(b"hi") + ws.on_message(b"hi") + ws.close() + # interrupt await interrupt_kernel(jp_fetch, kernel_id) + output = jp_read_emitted_events()[0] + assert "action" in output and output["action"] == "interrupt" + assert "msg" in output + assert "kernel_id" in output and kernel_id == output["kernel_id"] + assert "status" in output and output["status"] == "success" + # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True # restart await restart_kernel(jp_fetch, kernel_id) + output = jp_read_emitted_events()[0] + assert "action" in output and output["action"] == "restart" + assert "msg" in output + assert "kernel_id" in output and kernel_id == output["kernel_id"] + assert "status" in output and output["status"] == "success" + # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True + omitted_kernels[kernel_id] = True + if cull_kernel: + running_kernels.pop(kernel_id) + + # fetch kernel and session and ensure not considered running + assert await is_kernel_running(jp_fetch, kernel_id) is not cull_kernel + + # delete kernel. 
If culled, ensure 404 is raised + if cull_kernel: + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await delete_kernel(jp_fetch, kernel_id) + assert expected_http_error(e, 404) + else: + await delete_kernel(jp_fetch, kernel_id) + + output = jp_read_emitted_events()[0] + assert "action" in output and output["action"] == "shutdown" + assert "msg" in output + assert "kernel_id" in output and kernel_id == output["kernel_id"] + assert "status" in output and output["status"] == "success" + + assert await is_kernel_running(jp_fetch, kernel_id) is False + + +@pytest.mark.parametrize("missing_kernel", [True, False]) +async def test_gateway_shutdown(init_gateway, jp_serverapp, jp_fetch, missing_kernel): + # Validate server shutdown when multiple gateway kernels are present or + # we've lost track of at least one (missing) kernel + + # create two kernels + k1 = await create_kernel(jp_fetch, "kspec_bar") + k2 = await create_kernel(jp_fetch, "kspec_bar") + + # ensure they're considered running + assert await is_kernel_running(jp_fetch, k1) is True + assert await is_kernel_running(jp_fetch, k2) is True + + if missing_kernel: + running_kernels.pop(k1) # "terminate" kernel w/o our knowledge + + with mocked_gateway: + await jp_serverapp.kernel_manager.shutdown_all() + + assert await is_kernel_running(jp_fetch, k1) is False + assert await is_kernel_running(jp_fetch, k2) is False + + +@patch("websocket.create_connection", mock_websocket_create_connection(recv_side_effect=Exception)) +async def test_kernel_client_response_router_notifies_channel_queue_when_finished( + init_gateway, jp_serverapp, jp_fetch +): + # create + kernel_id = await create_kernel(jp_fetch, "kspec_bar") + + # get kernel manager + km: GatewayKernelManager = jp_serverapp.kernel_manager.get_kernel(kernel_id) + + # create kernel client + kc = km.client() + + await ensure_async(kc.start_channels()) + + with pytest.raises(RuntimeError): + await kc.iopub_channel.get_msg(timeout=10) + + all_channels = [ + 
kc.shell_channel, + kc.iopub_channel, + kc.stdin_channel, + kc.hb_channel, + kc.control_channel, + ] + assert all(channel.response_router_finished for channel in all_channels) + + await ensure_async(kc.stop_channels()) + # delete await delete_kernel(jp_fetch, kernel_id) - assert await is_kernel_running(jp_fetch, kernel_id) is False + + +async def test_channel_queue_get_msg_with_invalid_timeout(): + queue = ChannelQueue("iopub", MagicMock(), logging.getLogger()) + + with pytest.raises(ValueError): + await queue.get_msg(timeout=-1) + + +async def test_channel_queue_get_msg_raises_empty_after_timeout(): + queue = ChannelQueue("iopub", MagicMock(), logging.getLogger()) + + with pytest.raises(Empty): + await asyncio.wait_for(queue.get_msg(timeout=0.1), 2) + + +async def test_channel_queue_get_msg_without_timeout(): + queue = ChannelQueue("iopub", MagicMock(), logging.getLogger()) + + with pytest.raises(asyncio.TimeoutError): + await asyncio.wait_for(queue.get_msg(timeout=None), 1) + + +async def test_channel_queue_get_msg_with_existing_item(): + sent_message = {"msg_id": 1, "msg_type": 2} + queue = ChannelQueue("iopub", MagicMock(), logging.getLogger()) + queue.put_nowait(sent_message) + + received_message = await asyncio.wait_for(queue.get_msg(timeout=None), 1) + + assert received_message == sent_message + + +async def test_channel_queue_get_msg_when_response_router_had_finished(): + queue = ChannelQueue("iopub", MagicMock(), logging.getLogger()) + queue.response_router_finished = True + + with pytest.raises(RuntimeError): + await queue.get_msg() + + +class MockWebSocketClientConnection(tornado.websocket.WebSocketClientConnection): + def __init__(self, *args, **kwargs): + self._msgs: Queue = Queue(2) + self._msgs.put_nowait('{"msg_type": "status", "content": {"execution_state": "starting"}}') + + def write_message(self, message, *args, **kwargs): + return self._msgs.put(message) + + def read_message(self, *args, **kwargs): + return self._msgs.get() + + +def 
mock_websocket_connect(): + def helper(request): + fut: Future = Future() + mock_client = MockWebSocketClientConnection() + fut.set_result(mock_client) + return fut + + return helper + + +@patch("tornado.websocket.websocket_connect", mock_websocket_connect()) +async def test_websocket_connection_closed(init_gateway, jp_serverapp, jp_fetch, caplog): + # Create the kernel and get the kernel manager... + kernel_id = await create_kernel(jp_fetch, "kspec_foo") + km: GatewayKernelManager = jp_serverapp.kernel_manager.get_kernel(kernel_id) + + # Create the KernelWebsocketHandler... + request = HTTPServerRequest("foo", "GET") + request.connection = MagicMock() + handler = KernelWebsocketHandler(jp_serverapp.web_app, request) + + # Force the websocket handler to raise a closed error if we try to write a message + # to the client. + handler.ws_connection = MagicMock() + handler.ws_connection.is_closing = lambda: True + + # Create the GatewayWebSocketConnection and attach it to the handler... + conn = GatewayWebSocketConnection(parent=km, websocket_handler=handler) + handler.connection = conn + await conn.connect() + + # Processing websocket messages happens in separate coroutines and any + # errors in that process will show up in logs, but not bubble up to the + # caller. + # + # To check for these, we wait for the server to stop and then check the + # logs for errors. + await jp_serverapp._cleanup() + for _, level, message in caplog.record_tuples: + if level >= logging.ERROR: + pytest.fail(f"Logs contain an error: {message}") # # Test methods below... 
# -async def create_session(root_dir, jp_fetch, kernel_name): + + +async def is_session_active(jp_fetch, session_id): + """Issues request to get the set of running kernels""" + with mocked_gateway: + # Get list of running kernels + r = await jp_fetch("api", "sessions", method="GET") + assert r.code == 200 + sessions = json.loads(r.body.decode("utf-8")) + assert len(sessions) == len(running_kernels) # Use running_kernels as truth + return any(model.get("id") == session_id for model in sessions) + + +async def create_session(jp_fetch, kernel_name): """Creates a session for a kernel. The session is created against the server which then uses the gateway for kernel management. """ with mocked_gateway: - nb_path = root_dir / "testgw.ipynb" + nb_path = "/testgw.ipynb" body = json.dumps( {"path": str(nb_path), "type": "notebook", "kernel": {"name": kernel_name}} ) @@ -313,6 +767,7 @@ async def create_session(root_dir, jp_fetch, kernel_name): kernel_id = model.get("kernel").get("id") # ensure its in the running_kernels and name matches. running_kernel = running_kernels.get(kernel_id) + assert running_kernel is not None assert kernel_id == running_kernel.get("id") assert model.get("kernel").get("name") == running_kernel.get("name") session_id = model.get("id") @@ -339,14 +794,11 @@ async def is_kernel_running(jp_fetch, kernel_id): assert r.code == 200 kernels = json.loads(r.body.decode("utf-8")) assert len(kernels) == len(running_kernels) - for model in kernels: - if model.get("id") == kernel_id: - return True - return False + return any(model.get("id") == kernel_id for model in kernels) async def create_kernel(jp_fetch, kernel_name): - """Issues request to retart the given kernel""" + """Issues request to create a kernel with the given name""" with mocked_gateway: body = json.dumps({"name": kernel_name}) @@ -359,6 +811,7 @@ async def create_kernel(jp_fetch, kernel_name): kernel_id = model.get("id") # ensure its in the running_kernels and name matches. 
running_kernel = running_kernels.get(kernel_id) + assert running_kernel is not None assert kernel_id == running_kernel.get("id") assert model.get("name") == kernel_name @@ -383,7 +836,7 @@ async def interrupt_kernel(jp_fetch, kernel_id): async def restart_kernel(jp_fetch, kernel_id): - """Issues request to retart the given kernel""" + """Issues request to restart the given kernel""" with mocked_gateway: r = await jp_fetch( "api", @@ -398,6 +851,7 @@ async def restart_kernel(jp_fetch, kernel_id): restarted_kernel_id = model.get("id") # ensure its in the running_kernels and name matches. running_kernel = running_kernels.get(restarted_kernel_id) + assert running_kernel is not None assert restarted_kernel_id == running_kernel.get("id") assert model.get("name") == running_kernel.get("name") diff --git a/tests/test_paths.py b/tests/test_paths.py index 0789be4ded..24f948d4c8 100644 --- a/tests/test_paths.py +++ b/tests/test_paths.py @@ -46,7 +46,6 @@ def test_path_regex_bad(): ], ) async def test_trailing_slash( - jp_ensure_app_fixture, uri, expected, http_server_client, @@ -63,6 +62,7 @@ async def test_trailing_slash( ) # Capture the response from the raised exception value. 
response = err.value.response + assert response is not None assert response.code == 302 assert "Location" in response.headers assert response.headers["Location"] == url_path_join(jp_base_url, expected) diff --git a/tests/test_serialize.py b/tests/test_serialize.py index 6469097bba..28be8e03b0 100644 --- a/tests/test_serialize.py +++ b/tests/test_serialize.py @@ -3,7 +3,7 @@ from jupyter_client.session import Session -from jupyter_server.base.zmqhandlers import ( +from jupyter_server.services.kernels.connection.base import ( deserialize_binary_message, serialize_binary_message, ) diff --git a/tests/test_serverapp.py b/tests/test_serverapp.py index 89225e50bb..df703f550c 100644 --- a/tests/test_serverapp.py +++ b/tests/test_serverapp.py @@ -1,16 +1,37 @@ import getpass +import json import logging import os import pathlib +import sys +import warnings from unittest.mock import patch import pytest from jupyter_core.application import NoStart from traitlets import TraitError +from traitlets.config import Config from traitlets.tests.utils import check_help_all_output from jupyter_server.auth.security import passwd_check -from jupyter_server.serverapp import JupyterPasswordApp, ServerApp, list_running_servers +from jupyter_server.serverapp import ( + JupyterPasswordApp, + JupyterServerListApp, + ServerApp, + ServerWebApplication, + list_running_servers, + random_ports, +) +from jupyter_server.services.contents.filemanager import ( + AsyncFileContentsManager, + FileContentsManager, +) +from jupyter_server.utils import pathname2url, urljoin + + +@pytest.fixture(params=[FileContentsManager, AsyncFileContentsManager]) +def jp_file_contents_manager_class(request, tmp_path): + return request.param def test_help_output(): @@ -18,6 +39,49 @@ def test_help_output(): check_help_all_output("jupyter_server") +@pytest.mark.parametrize( + "format", + [ + "json", + "jsonlist", + "", + ], +) +def test_server_list(jp_configurable_serverapp, capsys, format): + app = 
jp_configurable_serverapp(log=logging.getLogger()) + + app.write_server_info_file() + + capsys.readouterr() + listapp = JupyterServerListApp( + parent=app, + ) + if format: + setattr(listapp, format, True) + listapp.start() + captured = capsys.readouterr() + sys.stdout.write(captured.out) + sys.stderr.write(captured.err) + out = captured.out.strip() + + if not format: + assert "Currently running servers:" in out + assert app.connection_url in out + assert len(out.splitlines()) == 2 + return + + if format == "jsonlist": + servers = json.loads(out) + elif format == "json": + servers = [json.loads(line) for line in out.splitlines()] + assert len(servers) == 1 + sinfo = servers[0] + + assert sinfo["port"] == app.port + assert sinfo["url"] == app.connection_url + assert sinfo["version"] == app.version + + def test_server_info_file(tmp_path, jp_configurable_serverapp): app = jp_configurable_serverapp(log=logging.getLogger()) @@ -34,7 +98,7 @@ def test_server_info_file(tmp_path, jp_configurable_serverapp): app.remove_server_info_file() assert list(list_running_servers(app.runtime_dir)) == [] - app.remove_server_info_file + app.remove_server_info_file() def test_root_dir(tmp_path, jp_configurable_serverapp): @@ -77,7 +141,7 @@ def test_valid_root_dir(valid_root_dir, jp_configurable_serverapp): assert app.root_dir == root_dir -def test_generate_config(tmp_path, jp_configurable_serverapp): +async def test_generate_config(tmp_path, jp_configurable_serverapp): app = jp_configurable_serverapp(config_dir=str(tmp_path)) app.initialize(["--generate-config", "--allow-root"]) with pytest.raises(NoStart): @@ -95,8 +159,8 @@ def test_server_password(tmp_path, jp_configurable_serverapp): app.start() sv = jp_configurable_serverapp() sv.load_config_file() - assert sv.password != "" - passwd_check(sv.password, password) + assert sv.identity_provider.hashed_password != "" + passwd_check(sv.identity_provider.hashed_password, password) def test_list_running_servers(jp_serverapp, jp_web_app): 
@@ -104,7 +168,7 @@ def test_list_running_servers(jp_serverapp, jp_web_app): assert len(servers) >= 1 -@pytest.fixture +@pytest.fixture() def prefix_path(jp_root_dir, tmp_path): """If a given path is prefixed with the literal strings `/jp_root_dir` or `/tmp_path`, replace those @@ -239,12 +303,14 @@ def test_urls(config, public_url, local_url, connection_url): # Verify we're working with a clean instance. ServerApp.clear_instance() serverapp = ServerApp.instance(**config) + serverapp.init_configurables() + token = serverapp.identity_provider.token # If a token is generated (not set by config), update # expected_url with token. - if serverapp._token_generated: - public_url = public_url.replace("", serverapp.token) - local_url = local_url.replace("", serverapp.token) - connection_url = connection_url.replace("", serverapp.token) + if serverapp.identity_provider.token_generated: + public_url = public_url.replace("", token) + local_url = local_url.replace("", token) + connection_url = connection_url.replace("", token) assert serverapp.public_url == public_url assert serverapp.local_url == local_url assert serverapp.connection_url == connection_url @@ -254,14 +320,18 @@ def test_urls(config, public_url, local_url, connection_url): # Preferred dir tests # ---------------------------------------------------------------------------- +@pytest.mark.filterwarnings("ignore::FutureWarning") def test_valid_preferred_dir(tmp_path, jp_configurable_serverapp): path = str(tmp_path) app = jp_configurable_serverapp(root_dir=path, preferred_dir=path) assert app.root_dir == path assert app.preferred_dir == path assert app.root_dir == app.preferred_dir + assert app.contents_manager.root_dir == path + assert app.contents_manager.preferred_dir == "" +@pytest.mark.filterwarnings("ignore::FutureWarning") def test_valid_preferred_dir_is_root_subdir(tmp_path, jp_configurable_serverapp): path = str(tmp_path) path_subdir = str(tmp_path / "subdir") @@ -270,17 +340,114 @@ def 
test_valid_preferred_dir_is_root_subdir(tmp_path, jp_configurable_serverapp) assert app.root_dir == path assert app.preferred_dir == path_subdir assert app.preferred_dir.startswith(app.root_dir) + assert app.contents_manager.preferred_dir == "subdir" def test_valid_preferred_dir_does_not_exist(tmp_path, jp_configurable_serverapp): path = str(tmp_path) path_subdir = str(tmp_path / "subdir") with pytest.raises(TraitError) as error: - app = jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) + jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) assert "No such preferred dir:" in str(error) +@pytest.mark.filterwarnings("ignore::DeprecationWarning") +def test_preferred_dir_validation_sync_regression( + tmp_path, jp_configurable_serverapp, jp_file_contents_manager_class +): + path = str(tmp_path) + path_subdir = str(tmp_path / "subdir") + os.makedirs(path_subdir, exist_ok=True) + app = jp_configurable_serverapp( + root_dir=path, + contents_manager_class=jp_file_contents_manager_class, + ) + app.contents_manager.preferred_dir = path_subdir + assert app.preferred_dir == path_subdir + assert app.preferred_dir.startswith(app.root_dir) + assert app.contents_manager.preferred_dir == "subdir" + + +# This tests some deprecated behavior as well +@pytest.mark.filterwarnings("ignore::FutureWarning") +@pytest.mark.parametrize( + "root_dir_loc,preferred_dir_loc,config_target", + [ + ("cli", "cli", "ServerApp"), + ("cli", "cli", "FileContentsManager"), + ("cli", "config", "ServerApp"), + ("cli", "config", "FileContentsManager"), + ("cli", "default", "ServerApp"), + ("cli", "default", "FileContentsManager"), + ("config", "cli", "ServerApp"), + ("config", "cli", "FileContentsManager"), + ("config", "config", "ServerApp"), + ("config", "config", "FileContentsManager"), + ("config", "default", "ServerApp"), + ("config", "default", "FileContentsManager"), + ("default", "cli", "ServerApp"), + ("default", "cli", "FileContentsManager"), + ("default", "config", 
"ServerApp"), + ("default", "config", "FileContentsManager"), + ("default", "default", "ServerApp"), + ("default", "default", "FileContentsManager"), + ], +) +def test_preferred_dir_validation( + root_dir_loc, + preferred_dir_loc, + config_target, + tmp_path, + jp_config_dir, + jp_configurable_serverapp, +): + expected_root_dir = str(tmp_path) + + os_preferred_dir = str(tmp_path / "subdir") + os.makedirs(os_preferred_dir, exist_ok=True) + config_preferred_dir = os_preferred_dir if config_target == "ServerApp" else "subdir" + config_preferred_dir = config_preferred_dir + "/" # add trailing slash to ensure it is removed + expected_preferred_dir = "subdir" + + argv = [] + kwargs = {"root_dir": None} + + config_lines = [] + config_file = None + if root_dir_loc == "config" or preferred_dir_loc == "config": + config_file = jp_config_dir.joinpath("jupyter_server_config.py") + + if root_dir_loc == "cli": + argv.append(f"--{config_target}.root_dir={expected_root_dir}") + if root_dir_loc == "config": + config_lines.append(f'c.{config_target}.root_dir = r"{expected_root_dir}"') + if root_dir_loc == "default": + expected_root_dir = os.getcwd() + + if preferred_dir_loc == "cli": + argv.append(f"--{config_target}.preferred_dir={config_preferred_dir}") + if preferred_dir_loc == "config": + config_lines.append(f'c.{config_target}.preferred_dir = r"{config_preferred_dir}"') + if preferred_dir_loc == "default": + expected_preferred_dir = "" + + if config_file is not None: + config_file.write_text("\n".join(config_lines)) + + if argv: + kwargs["argv"] = argv # type:ignore[assignment] + + if root_dir_loc == "default" and preferred_dir_loc != "default": # error expected + with pytest.raises(SystemExit): + jp_configurable_serverapp(**kwargs) + else: + app = jp_configurable_serverapp(**kwargs) + assert app.contents_manager.root_dir == expected_root_dir + assert app.contents_manager.preferred_dir == expected_preferred_dir + assert ".." 
not in expected_preferred_dir + + def test_invalid_preferred_dir_does_not_exist(tmp_path, jp_configurable_serverapp): path = str(tmp_path) path_subdir = str(tmp_path / "subdir") @@ -301,42 +468,152 @@ def test_invalid_preferred_dir_does_not_exist_set(tmp_path, jp_configurable_serv assert "No such preferred dir:" in str(error) +@pytest.mark.filterwarnings("ignore::FutureWarning") def test_invalid_preferred_dir_not_root_subdir(tmp_path, jp_configurable_serverapp): path = str(tmp_path / "subdir") os.makedirs(path, exist_ok=True) not_subdir_path = str(tmp_path) - with pytest.raises(TraitError) as error: - app = jp_configurable_serverapp(root_dir=path, preferred_dir=not_subdir_path) + with pytest.raises(SystemExit): + jp_configurable_serverapp(root_dir=path, preferred_dir=not_subdir_path) - assert "preferred_dir must be equal or a subdir of root_dir:" in str(error) - -def test_invalid_preferred_dir_not_root_subdir_set(tmp_path, jp_configurable_serverapp): +async def test_invalid_preferred_dir_not_root_subdir_set(tmp_path, jp_configurable_serverapp): path = str(tmp_path / "subdir") os.makedirs(path, exist_ok=True) - not_subdir_path = str(tmp_path) + not_subdir_path = os.path.relpath(tmp_path, path) app = jp_configurable_serverapp(root_dir=path) with pytest.raises(TraitError) as error: - app.preferred_dir = not_subdir_path + app.contents_manager.preferred_dir = not_subdir_path - assert "preferred_dir must be equal or a subdir of root_dir:" in str(error) + assert "is outside root contents directory" in str(error.value) -def test_observed_root_dir_updates_preferred_dir(tmp_path, jp_configurable_serverapp): - path = str(tmp_path) - new_path = str(tmp_path / "subdir") - os.makedirs(new_path, exist_ok=True) +async def test_absolute_preferred_dir_not_root_subdir_set(tmp_path, jp_configurable_serverapp): + path = str(tmp_path / "subdir") + os.makedirs(path, exist_ok=True) + not_subdir_path = str(tmp_path) - app = jp_configurable_serverapp(root_dir=path, preferred_dir=path) - 
app.root_dir = new_path - assert app.preferred_dir == new_path + app = jp_configurable_serverapp(root_dir=path) + with pytest.raises(TraitError) as error: + app.contents_manager.preferred_dir = not_subdir_path -def test_observed_root_dir_does_not_update_preferred_dir(tmp_path, jp_configurable_serverapp): - path = str(tmp_path) - new_path = str(tmp_path.parent) - app = jp_configurable_serverapp(root_dir=path, preferred_dir=path) - app.root_dir = new_path - assert app.preferred_dir == path + if os.name == "nt": + assert "is not a relative API path" in str(error.value) + else: + assert "Preferred directory not found" in str(error.value) + + +def test_random_ports(): + ports = list(random_ports(500, 50)) + assert len(ports) == 50 + + +def test_server_web_application(jp_serverapp): + server: ServerApp = jp_serverapp + server.default_url = "/foo" + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + app = ServerWebApplication( + server, + [], + server.kernel_manager, + server.contents_manager, + server.session_manager, + server.kernel_manager, + server.config_manager, + server.event_logger, + [], + server.log, + server.base_url, + server.default_url, + {}, + {}, + ) + app.init_handlers([], app.settings) + + +def test_misc(jp_serverapp, tmp_path): + app: ServerApp = jp_serverapp + assert app.terminals_enabled is True + app.extra_args = [str(tmp_path)] + app.parse_command_line([]) + + +def test_deprecated_props(jp_serverapp, tmp_path): + app: ServerApp = jp_serverapp + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + app.cookie_options = dict(foo=1) + app.get_secure_cookie_kwargs = dict(bar=1) + app.notebook_dir = str(tmp_path) + app.server_extensions = dict(foo=True) + app.kernel_ws_protocol = "foo" + app.limit_rate = True + app.iopub_msg_rate_limit = 10 + app.iopub_data_rate_limit = 10 + app.rate_limit_window = 10 + with pytest.raises(SystemExit): + app.pylab = "foo" + + +def test_signals(jp_serverapp): + app: ServerApp = jp_serverapp + 
app.answer_yes = True + app._restore_sigint_handler() + app._handle_sigint(None, None) + app._confirm_exit() + app._signal_info(None, None) + + +async def test_shutdown_no_activity(jp_serverapp): + app: ServerApp = jp_serverapp + app.extension_manager.extensions = {} + app.exit = lambda _: None # type:ignore[assignment,misc] + app.shutdown_no_activity() + app.shutdown_no_activity_timeout = 1 + app.init_shutdown_no_activity() + + +def test_running_server_info(jp_serverapp): + app: ServerApp = jp_serverapp + app.running_server_info(True) + + +@pytest.mark.parametrize("should_exist", [True, False]) +def test_browser_open_files(jp_configurable_serverapp, should_exist, caplog): + app = jp_configurable_serverapp(no_browser_open_file=not should_exist) + assert os.path.exists(app.browser_open_file) == should_exist + url = urljoin("file:", pathname2url(app.browser_open_file)) + url_messages = [rec.message for rec in caplog.records if url in rec.message] + assert url_messages if should_exist else not url_messages + + +def test_deprecated_notebook_dir_priority(jp_configurable_serverapp, tmp_path): + notebook_dir = tmp_path / "notebook" + notebook_dir.mkdir() + cli_dir = tmp_path / "cli" + cli_dir.mkdir() + + app = jp_configurable_serverapp(argv=[str(cli_dir)], root_dir=None) + assert app._root_dir_set + + # simulate delayed loading of notebook_dir config + # this should _not_ take priority over an explicitly set root_dir + # as done by notebook_shim + cfg = Config() + cfg.ServerApp.notebook_dir = str(notebook_dir) + app.update_config(cfg) + assert app.root_dir == str(cli_dir) + + +def test_immutable_cache_trait(): + # Verify we're working with a clean instance. 
+ ServerApp.clear_instance() + kwargs = {"static_immutable_cache": "/test/immutable"} + serverapp = ServerApp.instance(**kwargs) + serverapp.init_configurables() + serverapp.init_webapp() + assert serverapp.web_app.settings["static_immutable_cache"] == ["/test/immutable"] diff --git a/tests/test_terminal.py b/tests/test_terminal.py index d1eef0b1d4..27fd69fadb 100644 --- a/tests/test_terminal.py +++ b/tests/test_terminal.py @@ -1,15 +1,20 @@ import asyncio import json import os +import shlex import shutil import sys +import warnings import pytest +from flaky import flaky # type:ignore[import-untyped] from tornado.httpclient import HTTPClientError from traitlets.config import Config +from jupyter_server._tz import isoformat -@pytest.fixture + +@pytest.fixture() def terminal_path(tmp_path): subdir = tmp_path.joinpath("terminal_path") subdir.mkdir() @@ -19,7 +24,7 @@ def terminal_path(tmp_path): shutil.rmtree(str(subdir), ignore_errors=True) -@pytest.fixture +@pytest.fixture() def terminal_root_dir(jp_root_dir): subdir = jp_root_dir.joinpath("terminal_path") subdir.mkdir() @@ -33,7 +38,7 @@ def terminal_root_dir(jp_root_dir): CULL_INTERVAL = 3 -@pytest.fixture +@pytest.fixture() def jp_server_config(): return Config( { @@ -47,6 +52,12 @@ def jp_server_config(): ) +@pytest.fixture() +def jp_argv(): + """Allows tests to setup specific argv values.""" + return ["--ServerApp.jpserver_extensions", "jupyter_server_terminals=True"] + + async def test_no_terminals(jp_fetch): resp_list = await jp_fetch( "api", @@ -60,7 +71,7 @@ async def test_no_terminals(jp_fetch): assert len(data) == 0 -async def test_terminal_create(jp_fetch, jp_cleanup_subprocesses): +async def test_terminal_create(jp_fetch, jp_serverapp): resp = await jp_fetch( "api", "terminals", @@ -80,13 +91,10 @@ async def test_terminal_create(jp_fetch, jp_cleanup_subprocesses): data = json.loads(resp_list.body.decode()) assert len(data) == 1 - assert data[0] == term - await jp_cleanup_subprocesses() + assert 
data[0]["name"] == term["name"] -async def test_terminal_create_with_kwargs( - jp_fetch, jp_ws_fetch, terminal_path, jp_cleanup_subprocesses -): +async def test_terminal_create_with_kwargs(jp_fetch, jp_ws_fetch, terminal_path): resp_create = await jp_fetch( "api", "terminals", @@ -109,12 +117,9 @@ async def test_terminal_create_with_kwargs( data = json.loads(resp_get.body.decode()) assert data["name"] == term_name - await jp_cleanup_subprocesses() -async def test_terminal_create_with_cwd( - jp_fetch, jp_ws_fetch, terminal_path, jp_cleanup_subprocesses -): +async def test_terminal_create_with_cwd(jp_fetch, jp_ws_fetch, terminal_path, jp_serverapp): resp = await jp_fetch( "api", "terminals", @@ -145,11 +150,17 @@ async def test_terminal_create_with_cwd( ws.close() assert os.path.basename(terminal_path) in message_stdout - await jp_cleanup_subprocesses() + + resp = await jp_fetch("api", "status") + data = json.loads(resp.body.decode()) + assert data["last_activity"] == isoformat( + jp_serverapp.web_app.settings["terminal_last_activity"] + ) +@pytest.mark.skip(reason="Not yet working") async def test_terminal_create_with_relative_cwd( - jp_fetch, jp_ws_fetch, jp_root_dir, terminal_root_dir, jp_cleanup_subprocesses + jp_fetch, jp_ws_fetch, jp_root_dir, terminal_root_dir ): resp = await jp_fetch( "api", @@ -182,10 +193,10 @@ async def test_terminal_create_with_relative_cwd( expected = terminal_root_dir.name if sys.platform == "win32" else str(terminal_root_dir) assert expected in message_stdout - await jp_cleanup_subprocesses() -async def test_terminal_create_with_bad_cwd(jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses): +@pytest.mark.skip(reason="Not yet working") +async def test_terminal_create_with_bad_cwd(jp_fetch, jp_ws_fetch): non_existing_path = "/tmp/path/to/nowhere" resp = await jp_fetch( "api", @@ -217,19 +228,22 @@ async def test_terminal_create_with_bad_cwd(jp_fetch, jp_ws_fetch, jp_cleanup_su ws.close() assert non_existing_path not in message_stdout - await 
jp_cleanup_subprocesses() -async def test_culling_config(jp_server_config, jp_configurable_serverapp): - terminal_mgr_config = jp_configurable_serverapp().config.ServerApp.TerminalManager +@flaky +def test_culling_config(jp_server_config, jp_configurable_serverapp): + app = jp_configurable_serverapp() + terminal_mgr_config = app.config.ServerApp.TerminalManager assert terminal_mgr_config.cull_inactive_timeout == CULL_TIMEOUT assert terminal_mgr_config.cull_interval == CULL_INTERVAL - terminal_mgr_settings = jp_configurable_serverapp().web_app.settings["terminal_manager"] + app = jp_configurable_serverapp() + terminal_mgr_settings = app.web_app.settings["terminal_manager"] assert terminal_mgr_settings.cull_inactive_timeout == CULL_TIMEOUT assert terminal_mgr_settings.cull_interval == CULL_INTERVAL -async def test_culling(jp_server_config, jp_fetch, jp_cleanup_subprocesses): +@flaky +async def test_culling(jp_server_config, jp_fetch): # POST request resp = await jp_fetch( "api", @@ -259,4 +273,34 @@ async def test_culling(jp_server_config, jp_fetch, jp_cleanup_subprocesses): await asyncio.sleep(1) assert culled - await jp_cleanup_subprocesses() + + +@pytest.mark.parametrize( + "terminado_settings,expected_shell,min_traitlets", + [ + ("shell_command=\"['/path/to/shell', '-l']\"", ["/path/to/shell", "-l"], "5.4"), + ('shell_command="/string/path/to/shell -l"', ["/string/path/to/shell", "-l"], "5.1"), + ], +) +def test_shell_command_override( + terminado_settings, expected_shell, min_traitlets, jp_configurable_serverapp +): + pytest.importorskip("traitlets", minversion=min_traitlets) + argv = shlex.split(f"--ServerApp.terminado_settings={terminado_settings}") + app = jp_configurable_serverapp(argv=argv) + if os.name == "nt": + assert app.web_app.settings["terminal_manager"].shell_command in ( + expected_shell, + " ".join(expected_shell), + ) + else: + assert app.web_app.settings["terminal_manager"].shell_command == expected_shell + + +def test_importing_shims(): + with 
warnings.catch_warnings(): + warnings.simplefilter("ignore") + from jupyter_server.terminal import initialize + from jupyter_server.terminal.api_handlers import TerminalRootHandler + from jupyter_server.terminal.handlers import TermSocket + from jupyter_server.terminal.terminalmanager import TerminalManager diff --git a/tests/test_traittypes.py b/tests/test_traittypes.py index 7d9027c9f3..95574f649d 100644 --- a/tests/test_traittypes.py +++ b/tests/test_traittypes.py @@ -15,7 +15,6 @@ class DummyInt(int): class Thing(HasTraits): - a = InstanceFromClasses( default_value=2, klasses=[ diff --git a/tests/test_utils.py b/tests/test_utils.py index 5c5d16e73d..83d2a1d926 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,10 +1,28 @@ +import os +import socket +import subprocess +import sys +import uuid +import warnings from pathlib import Path from unittest.mock import patch import pytest from traitlets.tests.utils import check_help_all_output -from jupyter_server.utils import is_namespace_package, url_escape, url_unescape +from jupyter_server.utils import ( + check_pid, + check_version, + is_namespace_package, + path2url, + run_sync_in_loop, + samefile_simple, + to_api_path, + unix_socket_in_use, + url2path, + url_escape, + url_unescape, +) def test_help_output(): @@ -59,3 +77,51 @@ def test_is_namespace_package_no_spec(): assert is_namespace_package("dummy") is None mocked_spec.assert_called_once_with("dummy") + + +@pytest.mark.skipif(os.name == "nt", reason="Paths are annoying on Windows") +def test_path_utils(tmp_path): + path = str(tmp_path) + assert os.path.basename(path2url(path)) == os.path.basename(path) + + url = path2url(path) + assert path.endswith(url2path(url)) + + assert samefile_simple(path, path) + + assert to_api_path(path, os.path.dirname(path)) == os.path.basename(path) + + +def test_check_version(): + assert check_version("1.0.2", "1.0.1") + assert not check_version("1.0.0", "1.0.1") + assert check_version(1.0, "1.0.1") # 
type:ignore[arg-type] + + +def test_check_pid(): + proc = subprocess.Popen([sys.executable]) + proc.kill() + proc.wait() + check_pid(proc.pid) + + +async def test_run_sync_in_loop(): + async def foo(): + pass + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + await run_sync_in_loop(foo()) + + +@pytest.mark.skipif(os.name != "posix", reason="Requires unix sockets") +def test_unix_socket_in_use(tmp_path): + root_tmp_dir = Path("/tmp").resolve() + server_address = os.path.join(root_tmp_dir, uuid.uuid4().hex) + if os.path.exists(server_address): + os.remove(server_address) + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + sock.bind(server_address) + sock.listen(0) + assert unix_socket_in_use(server_address) + sock.close() diff --git a/tests/unix_sockets/conftest.py b/tests/unix_sockets/conftest.py index c9324d226d..6eb4daa03a 100644 --- a/tests/unix_sockets/conftest.py +++ b/tests/unix_sockets/conftest.py @@ -6,13 +6,13 @@ from jupyter_server import DEFAULT_JUPYTER_SERVER_PORT -@pytest.fixture +@pytest.fixture() def jp_process_id(): """Choose a random unused process ID.""" return os.getpid() -@pytest.fixture +@pytest.fixture() def jp_unix_socket_file(jp_process_id): """Define a temporary socket connection""" # Rely on `/tmp` to avoid any Linux socket length max buffer @@ -26,7 +26,7 @@ def jp_unix_socket_file(jp_process_id): jp_unix_socket_file.unlink() -@pytest.fixture +@pytest.fixture() def jp_http_port(): """Set the port to the default value, since sock and port cannot both be configured at the same time. 
diff --git a/tests/unix_sockets/test_api.py b/tests/unix_sockets/test_api.py index 85714f4a52..fb6358860f 100644 --- a/tests/unix_sockets/test_api.py +++ b/tests/unix_sockets/test_api.py @@ -16,13 +16,13 @@ from jupyter_server.utils import async_fetch, url_path_join, urlencode_unix_socket -@pytest.fixture +@pytest.fixture() def jp_server_config(jp_unix_socket_file): """Configure the serverapp fixture with the unix socket.""" return {"ServerApp": {"sock": jp_unix_socket_file, "allow_remote_access": True}} -@pytest.fixture +@pytest.fixture() def http_server_port(jp_unix_socket_file, jp_process_id): """Unix socket and process ID used by tornado's HTTP Server. @@ -32,7 +32,7 @@ def http_server_port(jp_unix_socket_file, jp_process_id): return (bind_unix_socket(jp_unix_socket_file), jp_process_id) -@pytest.fixture +@pytest.fixture() def jp_unix_socket_fetch(jp_unix_socket_file, jp_auth_header, jp_base_url, http_server, io_loop): """A fetch fixture for Jupyter Server tests that use the unix_serverapp fixture""" diff --git a/tests/unix_sockets/test_serverapp_integration.py b/tests/unix_sockets/test_serverapp_integration.py index 5bb1038234..392fd7a61a 100644 --- a/tests/unix_sockets/test_serverapp_integration.py +++ b/tests/unix_sockets/test_serverapp_integration.py @@ -1,18 +1,36 @@ +import os +import platform +import shlex import stat +import subprocess import sys +import time import pytest +from jupyter_server.serverapp import ( + JupyterServerListApp, + JupyterServerStopApp, + list_running_servers, + shutdown_server, +) +from jupyter_server.utils import urlencode_unix_socket, urlencode_unix_socket_path + # Skip this module if on Windows. Unix sockets are not available on Windows. pytestmark = pytest.mark.skipif( - sys.platform.startswith("win"), reason="Unix sockets are not available on Windows." 
+ sys.platform.startswith("win") or platform.python_implementation() == "PyPy", + reason="Unix sockets are not supported.", ) -import os -import subprocess -import time -from jupyter_server.utils import urlencode_unix_socket, urlencode_unix_socket_path +def _check_output(cmd, *args, **kwargs): + if isinstance(cmd, str): + cmd = shlex.split(cmd) + kwargs.setdefault("stderr", subprocess.STDOUT) + output = subprocess.check_output(cmd, *args, **kwargs) + if not isinstance(output, str): + output = output.decode("utf-8") + return output def _cleanup_process(proc): @@ -24,7 +42,7 @@ def _cleanup_process(proc): fid.close() -@pytest.mark.integration_test +@pytest.mark.integration_test() def test_shutdown_sock_server_integration(jp_unix_socket_file): url = urlencode_unix_socket(jp_unix_socket_file).encode() encoded_sock_path = urlencode_unix_socket_path(jp_unix_socket_file) @@ -35,6 +53,7 @@ def test_shutdown_sock_server_integration(jp_unix_socket_file): ) complete = False + assert p.stderr is not None for line in iter(p.stderr.readline, b""): if url in line: complete = True @@ -42,47 +61,49 @@ def test_shutdown_sock_server_integration(jp_unix_socket_file): assert complete, "did not find socket URL in stdout when launching notebook" - socket_path = encoded_sock_path.encode() - assert socket_path in subprocess.check_output(["jupyter-server", "list"]) + assert encoded_sock_path in _check_output("jupyter-server list") # Ensure umask is properly applied. 
assert stat.S_IMODE(os.lstat(jp_unix_socket_file).st_mode) == 0o700 try: - subprocess.check_output(["jupyter-server", "stop"], stderr=subprocess.STDOUT) + _check_output("jupyter-server stop") except subprocess.CalledProcessError as e: assert "There is currently no server running on" in e.output.decode() else: - raise AssertionError("expected stop command to fail due to target mis-match") + raise AssertionError("expected stop command to fail due to target mismatch") + + assert encoded_sock_path in _check_output("jupyter-server list") - assert encoded_sock_path.encode() in subprocess.check_output(["jupyter-server", "list"]) + # Fake out stopping the server. + app = JupyterServerStopApp(sock=str(jp_unix_socket_file)) + app.initialize([]) + app.shutdown_server = lambda _: True # type:ignore[method-assign] + app._maybe_remove_unix_socket = lambda _: _ # type: ignore[method-assign] + app.start() - subprocess.check_output(["jupyter-server", "stop", jp_unix_socket_file]) + _check_output(["jupyter-server", "stop", jp_unix_socket_file]) - assert encoded_sock_path.encode() not in subprocess.check_output(["jupyter-server", "list"]) + assert encoded_sock_path not in _check_output(["jupyter-server", "list"]) _cleanup_process(p) -@pytest.mark.integration_test +@pytest.mark.integration_test() def test_sock_server_validate_sockmode_type(): try: - subprocess.check_output( - ["jupyter-server", "--sock=/tmp/nonexistent", "--sock-mode=badbadbad"], - stderr=subprocess.STDOUT, - ) + _check_output(["jupyter-server", "--sock=/tmp/nonexistent", "--sock-mode=badbadbad"]) except subprocess.CalledProcessError as e: assert "badbadbad" in e.output.decode() else: raise AssertionError("expected execution to fail due to validation of --sock-mode param") -@pytest.mark.integration_test +@pytest.mark.integration_test() def test_sock_server_validate_sockmode_accessible(): try: - subprocess.check_output( + _check_output( ["jupyter-server", "--sock=/tmp/nonexistent", "--sock-mode=0444"], - 
stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: assert "0444" in e.output.decode() @@ -92,14 +113,14 @@ def test_sock_server_validate_sockmode_accessible(): def _ensure_stopped(check_msg="There are no running servers"): try: - subprocess.check_output(["jupyter-server", "stop"], stderr=subprocess.STDOUT) + _check_output(["jupyter-server", "stop"]) except subprocess.CalledProcessError as e: assert check_msg in e.output.decode() else: raise AssertionError("expected all servers to be stopped") -@pytest.mark.integration_test +@pytest.mark.integration_test() def test_stop_multi_integration(jp_unix_socket_file, jp_http_port): """Tests lifecycle behavior for mixed-mode server types w/ default ports. @@ -122,26 +143,22 @@ def test_stop_multi_integration(jp_unix_socket_file, jp_http_port): time.sleep(3) shutdown_msg = MSG_TMPL.format(jp_http_port) - assert shutdown_msg in subprocess.check_output(["jupyter-server", "stop"]).decode() + assert shutdown_msg in _check_output(["jupyter-server", "stop"]) _ensure_stopped("There is currently no server running on 8888") - assert ( - MSG_TMPL.format(jp_unix_socket_file) - in subprocess.check_output(["jupyter-server", "stop", jp_unix_socket_file]).decode() + assert MSG_TMPL.format(jp_unix_socket_file) in _check_output( + ["jupyter-server", "stop", jp_unix_socket_file] ) - assert ( - MSG_TMPL.format(TEST_PORT) - in subprocess.check_output(["jupyter-server", "stop", TEST_PORT]).decode() - ) + assert MSG_TMPL.format(TEST_PORT) in _check_output(["jupyter-server", "stop", TEST_PORT]) _ensure_stopped() [_cleanup_process(p) for p in [p1, p2, p3]] -@pytest.mark.integration_test +@pytest.mark.integration_test() def test_launch_socket_collision(jp_unix_socket_file): """Tests UNIX socket in-use detection for lifecycle correctness.""" sock = jp_unix_socket_file @@ -156,11 +173,11 @@ def test_launch_socket_collision(jp_unix_socket_file): # Try to start a server bound to the same UNIX socket. 
try: - subprocess.check_output(cmd, stderr=subprocess.STDOUT) + _check_output(cmd) except subprocess.CalledProcessError as cpe: assert check_msg in cpe.output.decode() except Exception as ex: - raise AssertionError(f"expected 'already in use' error, got '{ex}'!") + raise AssertionError(f"expected 'already in use' error, got '{ex}'!") from ex else: raise AssertionError("expected 'already in use' error, got success instead!") @@ -170,3 +187,67 @@ def test_launch_socket_collision(jp_unix_socket_file): _ensure_stopped() _cleanup_process(p1) + + +@pytest.mark.integration_test() +def test_shutdown_server(jp_environ): + # Start a server in another process + # Stop that server + import subprocess + + from jupyter_client.connect import LocalPortCache + + port = LocalPortCache().find_available_port("localhost") + p = subprocess.Popen(["jupyter-server", f"--port={port}"]) + servers = [] + while 1: + servers = list(list_running_servers()) + if len(servers): + break + time.sleep(0.1) + while 1: + try: + shutdown_server(servers[0]) + break + except ConnectionRefusedError: + time.sleep(0.1) + _cleanup_process(p) + + +@pytest.mark.integration_test() +def test_jupyter_server_apps(jp_environ): + # Start a server in another process + # Stop that server + import subprocess + + from jupyter_client.connect import LocalPortCache + + port = LocalPortCache().find_available_port("localhost") + p = subprocess.Popen(["jupyter-server", f"--port={port}"]) + servers = [] + while 1: + servers = list(list_running_servers()) + if len(servers): + break + time.sleep(0.1) + + app = JupyterServerListApp() + app.initialize([]) + app.jsonlist = True + app.start() + app.jsonlist = False + app.json = True + app.start() + app.json = False + app.start() + + stop_app = JupyterServerStopApp() + stop_app.initialize([]) + stop_app.port = port + while 1: + try: + stop_app.start() + break + except ConnectionRefusedError: + time.sleep(0.1) + _cleanup_process(p) diff --git a/tests/utils.py b/tests/utils.py index 
6e6649af42..0ca9c008f2 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,6 +1,8 @@ import json +from typing import NewType -import tornado +from tornado.httpclient import HTTPClientError +from tornado.web import HTTPError some_resource = "The very model of a modern major general" @@ -9,6 +11,8 @@ "display_name": "Test kernel", } +ApiPath = NewType("ApiPath", str) + def mkdir(tmp_path, *parts): path = tmp_path.joinpath(*parts) @@ -20,7 +24,7 @@ def mkdir(tmp_path, *parts): def expected_http_error(error, expected_code, expected_message=None): """Check that the error matches the expected output error.""" e = error.value - if isinstance(e, tornado.web.HTTPError): + if isinstance(e, HTTPError): if expected_code != e.status_code: return False if expected_message is not None and expected_message != str(e): @@ -28,8 +32,8 @@ def expected_http_error(error, expected_code, expected_message=None): return True elif any( [ - isinstance(e, tornado.httpclient.HTTPClientError), - isinstance(e, tornado.httpclient.HTTPError), + isinstance(e, HTTPClientError), + isinstance(e, HTTPError), ] ): if expected_code != e.code: